// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "hydrogen.h"

#include <algorithm>

#include "v8.h"
#include "allocation-site-scopes.h"
#include "codegen.h"
#include "full-codegen.h"
#include "hashmap.h"
#include "hydrogen-bce.h"
#include "hydrogen-bch.h"
#include "hydrogen-canonicalize.h"
#include "hydrogen-check-elimination.h"
#include "hydrogen-dce.h"
#include "hydrogen-dehoist.h"
#include "hydrogen-environment-liveness.h"
#include "hydrogen-escape-analysis.h"
#include "hydrogen-infer-representation.h"
#include "hydrogen-infer-types.h"
#include "hydrogen-load-elimination.h"
#include "hydrogen-gvn.h"
#include "hydrogen-mark-deoptimize.h"
#include "hydrogen-mark-unreachable.h"
#include "hydrogen-minus-zero.h"
#include "hydrogen-osr.h"
#include "hydrogen-range-analysis.h"
#include "hydrogen-redundant-phi.h"
#include "hydrogen-removable-simulates.h"
#include "hydrogen-representation-changes.h"
#include "hydrogen-sce.h"
#include "hydrogen-uint32-analysis.h"
#include "lithium-allocator.h"
#include "parser.h"
#include "runtime.h"
#include "scopeinfo.h"
#include "scopes.h"
#include "stub-cache.h"
#include "typing.h"

#if V8_TARGET_ARCH_IA32
#include "ia32/lithium-codegen-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/lithium-codegen-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/lithium-codegen-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/lithium-codegen-mips.h"
#else
#error Unsupported target architecture.
#endif

namespace v8 {
namespace internal {

HBasicBlock::HBasicBlock(HGraph* graph)
    : block_id_(graph->GetNextBlockID()),
      graph_(graph),
      phis_(4, graph->zone()),
      first_(NULL),
      last_(NULL),
      end_(NULL),
      loop_information_(NULL),
      predecessors_(2, graph->zone()),
      dominator_(NULL),
      dominated_blocks_(4, graph->zone()),
      last_environment_(NULL),
      argument_count_(-1),
      first_instruction_index_(-1),
      last_instruction_index_(-1),
      deleted_phis_(4, graph->zone()),
      parent_loop_header_(NULL),
      inlined_entry_block_(NULL),
      is_inline_return_target_(false),
      is_reachable_(true),
      dominates_loop_successors_(false),
      is_osr_entry_(false) { }


Isolate* HBasicBlock::isolate() const {
  return graph_->isolate();
}


void HBasicBlock::MarkUnreachable() {
  is_reachable_ = false;
}


void HBasicBlock::AttachLoopInformation() {
  ASSERT(!IsLoopHeader());
  loop_information_ = new(zone()) HLoopInformation(this, zone());
}


void HBasicBlock::DetachLoopInformation() {
  ASSERT(IsLoopHeader());
  loop_information_ = NULL;
}


void HBasicBlock::AddPhi(HPhi* phi) {
  ASSERT(!IsStartBlock());
  phis_.Add(phi, zone());
  phi->SetBlock(this);
}


void HBasicBlock::RemovePhi(HPhi* phi) {
  ASSERT(phi->block() == this);
  ASSERT(phis_.Contains(phi));
  phi->Kill();
  phis_.RemoveElement(phi);
  phi->SetBlock(NULL);
}


void HBasicBlock::AddInstruction(HInstruction* instr, int position) {
  ASSERT(!IsStartBlock() || !IsFinished());
  ASSERT(!instr->IsLinked());
  ASSERT(!IsFinished());

  if (position != RelocInfo::kNoPosition) {
    instr->set_position(position);
  }
  if (first_ == NULL) {
    ASSERT(last_environment() != NULL);
    ASSERT(!last_environment()->ast_id().IsNone());
    HBlockEntry* entry = new(zone()) HBlockEntry();
    entry->InitializeAsFirst(this);
    if (position != RelocInfo::kNoPosition) {
      entry->set_position(position);
    } else {
      ASSERT(!FLAG_emit_opt_code_positions ||
             !graph()->info()->IsOptimizing());
    }
    first_ = last_ = entry;
  }
  instr->InsertAfter(last_);
}


HPhi* HBasicBlock::AddNewPhi(int merged_index) {
  if (graph()->IsInsideNoSideEffectsScope()) {
    merged_index = HPhi::kInvalidMergedIndex;
  }
  HPhi* phi = new(zone()) HPhi(merged_index, zone());
  AddPhi(phi);
  return phi;
}
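
// A reader's note (not from the original source): a simulate captures the
// state of the abstract environment (locals and expression stack) at a given
// ast_id so that, on deoptimization, the corresponding unoptimized frame can
// be reconstructed at exactly that point.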
HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
                                       RemovableSimulate removable) {
  ASSERT(HasEnvironment());
  HEnvironment* environment = last_environment();
  ASSERT(ast_id.IsNone() ||
         ast_id == BailoutId::StubEntry() ||
         environment->closure()->shared()->VerifyBailoutId(ast_id));

  int push_count = environment->push_count();
  int pop_count = environment->pop_count();

  HSimulate* instr =
      new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
#ifdef DEBUG
  instr->set_closure(environment->closure());
#endif
  // Order of pushed values: newest (top of stack) first. This allows
  // HSimulate::MergeWith() to easily append additional pushed values
  // that are older (from further down the stack).
  for (int i = 0; i < push_count; ++i) {
    instr->AddPushedValue(environment->ExpressionStackAt(i));
  }
  for (GrowableBitVector::Iterator it(environment->assigned_variables(),
                                      zone());
       !it.Done();
       it.Advance()) {
    int index = it.Current();
    instr->AddAssignedValue(index, environment->Lookup(index));
  }
  environment->ClearHistory();
  return instr;
}


void HBasicBlock::Finish(HControlInstruction* end, int position) {
  ASSERT(!IsFinished());
  AddInstruction(end, position);
  end_ = end;
  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    it.Current()->RegisterPredecessor(this);
  }
}


void HBasicBlock::Goto(HBasicBlock* block,
                       int position,
                       FunctionState* state,
                       bool add_simulate) {
  bool drop_extra = state != NULL &&
      state->inlining_kind() == DROP_EXTRA_ON_RETURN;

  if (block->IsInlineReturnTarget()) {
    HEnvironment* env = last_environment();
    int argument_count = env->arguments_environment()->parameter_count();
    AddInstruction(new(zone())
                   HLeaveInlined(state->entry(), argument_count),
                   position);
    UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  }

  if (add_simulate) AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(block);
  Finish(instr, position);
}


void HBasicBlock::AddLeaveInlined(HValue* return_value,
                                  FunctionState* state,
                                  int position) {
  HBasicBlock* target = state->function_return();
  bool drop_extra = state->inlining_kind() == DROP_EXTRA_ON_RETURN;

  ASSERT(target->IsInlineReturnTarget());
  ASSERT(return_value != NULL);
  HEnvironment* env = last_environment();
  int argument_count = env->arguments_environment()->parameter_count();
  AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
                 position);
  UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  last_environment()->Push(return_value);
  AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(target);
  Finish(instr, position);
}


void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
  ASSERT(!HasEnvironment());
  ASSERT(first() == NULL);
  UpdateEnvironment(env);
}


void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
  last_environment_ = env;
  graph()->update_maximum_environment_size(env->first_expression_index());
}


void HBasicBlock::SetJoinId(BailoutId ast_id) {
  int length = predecessors_.length();
  ASSERT(length > 0);
  for (int i = 0; i < length; i++) {
    HBasicBlock* predecessor = predecessors_[i];
    ASSERT(predecessor->end()->IsGoto());
    HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
    ASSERT(i != 0 ||
           (predecessor->last_environment()->closure().is_null() ||
            predecessor->last_environment()->closure()->shared()
                ->VerifyBailoutId(ast_id)));
    simulate->set_ast_id(ast_id);
    predecessor->last_environment()->set_ast_id(ast_id);
  }
}


bool HBasicBlock::Dominates(HBasicBlock* other) const {
  HBasicBlock* current = other->dominator();
  while (current != NULL) {
    if (current == this) return true;
    current = current->dominator();
  }
  return false;
}


int HBasicBlock::LoopNestingDepth() const {
  const HBasicBlock* current = this;
  int result = (current->IsLoopHeader()) ? 1 : 0;
  while (current->parent_loop_header() != NULL) {
    current = current->parent_loop_header();
    result++;
  }
  return result;
}


void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
  ASSERT(IsLoopHeader());

  SetJoinId(stmt->EntryId());
  if (predecessors()->length() == 1) {
    // This is a degenerate loop.
    DetachLoopInformation();
    return;
  }

  // Only the first entry into the loop is from outside the loop. All other
  // entries must be back edges.
  for (int i = 1; i < predecessors()->length(); ++i) {
    loop_information()->RegisterBackEdge(predecessors()->at(i));
  }
}


void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
  if (HasPredecessor()) {
    // Only loop header blocks can have a predecessor added after
    // instructions have been added to the block (they have phis for all
    // values in the environment; these phis may be eliminated later).
    ASSERT(IsLoopHeader() || first_ == NULL);
    HEnvironment* incoming_env = pred->last_environment();
    if (IsLoopHeader()) {
      ASSERT(phis()->length() == incoming_env->length());
      for (int i = 0; i < phis_.length(); ++i) {
        phis_[i]->AddInput(incoming_env->values()->at(i));
      }
    } else {
      last_environment()->AddIncomingEdge(this, pred->last_environment());
    }
  } else if (!HasEnvironment() && !IsFinished()) {
    ASSERT(!IsLoopHeader());
    SetInitialEnvironment(pred->last_environment()->Copy());
  }

  predecessors_.Add(pred, zone());
}


void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
  ASSERT(!dominated_blocks_.Contains(block));
  // Keep the list of dominated blocks sorted such that if there are two
  // succeeding blocks in this list, the predecessor is before the successor.
  int index = 0;
  while (index < dominated_blocks_.length() &&
         dominated_blocks_[index]->block_id() < block->block_id()) {
    ++index;
  }
  dominated_blocks_.InsertAt(index, block, zone());
}


void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
  if (dominator_ == NULL) {
    dominator_ = other;
    other->AddDominatedBlock(this);
  } else if (other->dominator() != NULL) {
    HBasicBlock* first = dominator_;
    HBasicBlock* second = other;

    while (first != second) {
      if (first->block_id() > second->block_id()) {
        first = first->dominator();
      } else {
        second = second->dominator();
      }
      ASSERT(first != NULL && second != NULL);
    }

    if (dominator_ != first) {
      ASSERT(dominator_->dominated_blocks_.Contains(this));
      dominator_->dominated_blocks_.RemoveElement(this);
      dominator_ = first;
      first->AddDominatedBlock(this);
    }
  }
}


void HBasicBlock::AssignLoopSuccessorDominators() {
  // Mark blocks that dominate all subsequent reachable blocks inside their
  // loop. Exploit the fact that blocks are sorted in reverse post order. When
  // the loop is visited in increasing block id order, if the number of
  // non-loop-exiting successor edges at the dominator_candidate block doesn't
  // exceed the number of previously encountered predecessor edges, there is no
  // path from the loop header to any block with higher id that doesn't go
  // through the dominator_candidate block. In this case, the
  // dominator_candidate block is guaranteed to dominate all blocks reachable
  // from it with higher ids.
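  //
  // For intuition, an illustrative example (not part of the original
  // comment): let the header H branch to A and B, which join at C, all
  // inside the loop. After processing H, two forward edges are outstanding.
  // At A one incoming edge is consumed but the edge A->C is added, so a path
  // bypassing A (through B) may still exist and A is not marked; likewise
  // for B. At C both remaining edges are consumed and the count reaches
  // zero: every path from H to later blocks in the loop passes through C,
  // so C is marked as a loop successor dominator.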
  HBasicBlock* last = loop_information()->GetLastBackEdge();
  int outstanding_successors = 1;  // one edge from the pre-header
  // Header always dominates everything.
  MarkAsLoopSuccessorDominator();
  for (int j = block_id(); j <= last->block_id(); ++j) {
    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
    for (HPredecessorIterator it(dominator_candidate); !it.Done();
         it.Advance()) {
      HBasicBlock* predecessor = it.Current();
      // Don't count back edges.
      if (predecessor->block_id() < dominator_candidate->block_id()) {
        outstanding_successors--;
      }
    }

    // If more successors than predecessors have been seen in the loop up to
    // now, it's not possible to guarantee that the current block dominates
    // all of the blocks with higher IDs. In this case, assume conservatively
    // that those paths through the loop that don't go through the current
    // block contain all of the loop's dependencies. Also be careful to record
    // dominator information about the current loop that's being processed,
    // and not nested loops, which will be processed when
    // AssignLoopSuccessorDominators gets called on their header.
    ASSERT(outstanding_successors >= 0);
    HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
    if (outstanding_successors == 0 &&
        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
      dominator_candidate->MarkAsLoopSuccessorDominator();
    }
    HControlInstruction* end = dominator_candidate->end();
    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
      HBasicBlock* successor = it.Current();
      // Only count successors that remain inside the loop and don't loop back
      // to a loop header.
      if (successor->block_id() > dominator_candidate->block_id() &&
          successor->block_id() <= last->block_id()) {
        // Backwards edges must land on loop headers.
        ASSERT(successor->block_id() > dominator_candidate->block_id() ||
               successor->IsLoopHeader());
        outstanding_successors++;
      }
    }
  }
}


int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
  for (int i = 0; i < predecessors_.length(); ++i) {
    if (predecessors_[i] == predecessor) return i;
  }
  UNREACHABLE();
  return -1;
}


#ifdef DEBUG
void HBasicBlock::Verify() {
  // Check that every block is finished.
  ASSERT(IsFinished());
  ASSERT(block_id() >= 0);

  // Check that the incoming edges are in edge split form.
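  // (Edge split form, for reference: no predecessor of a merge block may
  // itself have more than one successor, i.e. critical edges have been split
  // by inserting intermediate blocks.)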
  if (predecessors_.length() > 1) {
    for (int i = 0; i < predecessors_.length(); ++i) {
      ASSERT(predecessors_[i]->end()->SecondSuccessor() == NULL);
    }
  }
}
#endif


void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
  this->back_edges_.Add(block, block->zone());
  AddBlock(block);
}


HBasicBlock* HLoopInformation::GetLastBackEdge() const {
  int max_id = -1;
  HBasicBlock* result = NULL;
  for (int i = 0; i < back_edges_.length(); ++i) {
    HBasicBlock* cur = back_edges_[i];
    if (cur->block_id() > max_id) {
      max_id = cur->block_id();
      result = cur;
    }
  }
  return result;
}


void HLoopInformation::AddBlock(HBasicBlock* block) {
  if (block == loop_header()) return;
  if (block->parent_loop_header() == loop_header()) return;
  if (block->parent_loop_header() != NULL) {
    AddBlock(block->parent_loop_header());
  } else {
    block->set_parent_loop_header(loop_header());
    blocks_.Add(block, block->zone());
    for (int i = 0; i < block->predecessors()->length(); ++i) {
      AddBlock(block->predecessors()->at(i));
    }
  }
}


#ifdef DEBUG

// Checks reachability of the blocks in this graph and stores a bit in
// the BitVector "reachable()" for every block that can be reached
// from the start block of the graph. If "dont_visit" is non-null, the given
// block is treated as if it were not part of the graph. "visited_count()"
// returns the number of reachable blocks.
class ReachabilityAnalyzer BASE_EMBEDDED {
 public:
  ReachabilityAnalyzer(HBasicBlock* entry_block,
                       int block_count,
                       HBasicBlock* dont_visit)
      : visited_count_(0),
        stack_(16, entry_block->zone()),
        reachable_(block_count, entry_block->zone()),
        dont_visit_(dont_visit) {
    PushBlock(entry_block);
    Analyze();
  }

  int visited_count() const { return visited_count_; }
  const BitVector* reachable() const { return &reachable_; }

 private:
  void PushBlock(HBasicBlock* block) {
    if (block != NULL && block != dont_visit_ &&
        !reachable_.Contains(block->block_id())) {
      reachable_.Add(block->block_id());
      stack_.Add(block, block->zone());
      visited_count_++;
    }
  }

  void Analyze() {
    while (!stack_.is_empty()) {
      HControlInstruction* end = stack_.RemoveLast()->end();
      for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
        PushBlock(it.Current());
      }
    }
  }

  int visited_count_;
  ZoneList<HBasicBlock*> stack_;
  BitVector reachable_;
  HBasicBlock* dont_visit_;
};


void HGraph::Verify(bool do_full_verify) const {
  Heap::RelocationLock relocation_lock(isolate()->heap());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  for (int i = 0; i < blocks_.length(); i++) {
    HBasicBlock* block = blocks_.at(i);

    block->Verify();

    // Check that every block contains at least one node and that only the
    // last node is a control instruction.
    HInstruction* current = block->first();
    ASSERT(current != NULL && current->IsBlockEntry());
    while (current != NULL) {
      ASSERT((current->next() == NULL) == current->IsControlInstruction());
      ASSERT(current->block() == block);
      current->Verify();
      current = current->next();
    }

    // Check that successors are correctly set.
    HBasicBlock* first = block->end()->FirstSuccessor();
    HBasicBlock* second = block->end()->SecondSuccessor();
    ASSERT(second == NULL || first != NULL);

    // Check that the predecessor array is correct.
    if (first != NULL) {
      ASSERT(first->predecessors()->Contains(block));
      if (second != NULL) {
        ASSERT(second->predecessors()->Contains(block));
      }
    }

    // Check that phis have correct arguments.
    for (int j = 0; j < block->phis()->length(); j++) {
      HPhi* phi = block->phis()->at(j);
      phi->Verify();
    }

    // Check that all join blocks have predecessors that end with an
    // unconditional goto and agree on their environment node id.
    if (block->predecessors()->length() >= 2) {
      BailoutId id =
          block->predecessors()->first()->last_environment()->ast_id();
      for (int k = 0; k < block->predecessors()->length(); k++) {
        HBasicBlock* predecessor = block->predecessors()->at(k);
        ASSERT(predecessor->end()->IsGoto() ||
               predecessor->end()->IsDeoptimize());
        ASSERT(predecessor->last_environment()->ast_id() == id);
      }
    }
  }

  // Check the special property of the first block: it has no predecessors.
  ASSERT(blocks_.at(0)->predecessors()->is_empty());

  if (do_full_verify) {
    // Check that the graph is fully connected.
    ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
    ASSERT(analyzer.visited_count() == blocks_.length());

    // Check that the entry block dominator is NULL.
    ASSERT(entry_block_->dominator() == NULL);

    // Check dominators.
    for (int i = 0; i < blocks_.length(); ++i) {
      HBasicBlock* block = blocks_.at(i);
      if (block->dominator() == NULL) {
        // Only the start block may have no dominator assigned.
        ASSERT(i == 0);
      } else {
        // Assert that block is unreachable if dominator must not be visited.
        ReachabilityAnalyzer dominator_analyzer(entry_block_,
                                                blocks_.length(),
                                                block->dominator());
        ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
      }
    }
  }
}

#endif


HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
                               int32_t value) {
  if (!pointer->is_set()) {
    // Can't pass GetInvalidContext() to HConstant::New, because that would
    // recursively call GetConstant.
    HConstant* constant = HConstant::New(zone(), NULL, value);
    constant->InsertAfter(entry_block()->first());
    pointer->set(constant);
    return constant;
  }
  return ReinsertConstantIfNecessary(pointer->get());
}


HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
  if (!constant->IsLinked()) {
    // The constant was removed from the graph. Reinsert.
    constant->ClearFlag(HValue::kIsDead);
    constant->InsertAfter(entry_block()->first());
  }
  return constant;
}


HConstant* HGraph::GetConstant0() {
  return GetConstant(&constant_0_, 0);
}


HConstant* HGraph::GetConstant1() {
  return GetConstant(&constant_1_, 1);
}


HConstant* HGraph::GetConstantMinus1() {
  return GetConstant(&constant_minus1_, -1);
}


#define DEFINE_GET_CONSTANT(Name, name, htype, boolean_value)                 \
HConstant* HGraph::GetConstant##Name() {                                      \
  if (!constant_##name##_.is_set()) {                                         \
    HConstant* constant = new(zone()) HConstant(                              \
        Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()),\
        Representation::Tagged(),                                             \
        htype,                                                                \
        false,                                                                \
        true,                                                                 \
        false,                                                                \
        boolean_value);                                                       \
    constant->InsertAfter(entry_block()->first());                            \
    constant_##name##_.set(constant);                                         \
  }                                                                           \
  return ReinsertConstantIfNecessary(constant_##name##_.get());               \
}


DEFINE_GET_CONSTANT(Undefined, undefined, HType::Tagged(), false)
DEFINE_GET_CONSTANT(True, true, HType::Boolean(), true)
DEFINE_GET_CONSTANT(False, false, HType::Boolean(), false)
DEFINE_GET_CONSTANT(Hole, the_hole, HType::Tagged(), false)
DEFINE_GET_CONSTANT(Null, null, HType::Tagged(), false)


#undef DEFINE_GET_CONSTANT
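
// For reference, DEFINE_GET_CONSTANT(True, true, HType::Boolean(), true)
// expands to an HGraph::GetConstantTrue() that lazily wraps
// isolate()->factory()->true_value() in an immovable, tagged HConstant,
// inserts it right after the entry block's first instruction, and caches it
// in constant_true_.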

#define DEFINE_IS_CONSTANT(Name, name)                                        \
bool HGraph::IsConstant##Name(HConstant* constant) {                          \
  return constant_##name##_.is_set() && constant == constant_##name##_.get(); \
}
DEFINE_IS_CONSTANT(Undefined, undefined)
DEFINE_IS_CONSTANT(0, 0)
DEFINE_IS_CONSTANT(1, 1)
DEFINE_IS_CONSTANT(Minus1, minus1)
DEFINE_IS_CONSTANT(True, true)
DEFINE_IS_CONSTANT(False, false)
DEFINE_IS_CONSTANT(Hole, the_hole)
DEFINE_IS_CONSTANT(Null, null)

#undef DEFINE_IS_CONSTANT


HConstant* HGraph::GetInvalidContext() {
  return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
}


bool HGraph::IsStandardConstant(HConstant* constant) {
  if (IsConstantUndefined(constant)) return true;
  if (IsConstant0(constant)) return true;
  if (IsConstant1(constant)) return true;
  if (IsConstantMinus1(constant)) return true;
  if (IsConstantTrue(constant)) return true;
  if (IsConstantFalse(constant)) return true;
  if (IsConstantHole(constant)) return true;
  if (IsConstantNull(constant)) return true;
  return false;
}
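
// Typical IfBuilder usage, sketched from the call sites later in this file
// (e.g. BuildCheckForCapacityGrow):
//
//   IfBuilder checker(this);
//   checker.If<HCompareNumericAndBranch>(key, length, Token::GTE);
//   checker.Then();
//   ...  // code emitted into the true branch
//   checker.Else();
//   ...  // code emitted into the false branch
//   checker.End();
//
// Values that must survive the join are pushed on the environment in both
// branches and popped after End().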
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
    : builder_(builder),
      finished_(false),
      did_then_(false),
      did_else_(false),
      did_else_if_(false),
      did_and_(false),
      did_or_(false),
      captured_(false),
      needs_compare_(true),
      pending_merge_block_(false),
      split_edge_merge_block_(NULL),
      merge_at_join_blocks_(NULL),
      normal_merge_at_join_block_count_(0),
      deopt_merge_at_join_block_count_(0) {
  HEnvironment* env = builder->environment();
  first_true_block_ = builder->CreateBasicBlock(env->Copy());
  first_false_block_ = builder->CreateBasicBlock(env->Copy());
}


HGraphBuilder::IfBuilder::IfBuilder(
    HGraphBuilder* builder,
    HIfContinuation* continuation)
    : builder_(builder),
      finished_(false),
      did_then_(false),
      did_else_(false),
      did_else_if_(false),
      did_and_(false),
      did_or_(false),
      captured_(false),
      needs_compare_(false),
      pending_merge_block_(false),
      first_true_block_(NULL),
      first_false_block_(NULL),
      split_edge_merge_block_(NULL),
      merge_at_join_blocks_(NULL),
      normal_merge_at_join_block_count_(0),
      deopt_merge_at_join_block_count_(0) {
  continuation->Continue(&first_true_block_,
                         &first_false_block_);
}


HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
    HControlInstruction* compare) {
  ASSERT(did_then_ == did_else_);
  if (did_else_) {
    // Handle if-then-elseif.
    did_else_if_ = true;
    did_else_ = false;
    did_then_ = false;
    did_and_ = false;
    did_or_ = false;
    pending_merge_block_ = false;
    split_edge_merge_block_ = NULL;
    HEnvironment* env = builder_->environment();
    first_true_block_ = builder_->CreateBasicBlock(env->Copy());
    first_false_block_ = builder_->CreateBasicBlock(env->Copy());
  }
  if (split_edge_merge_block_ != NULL) {
    HEnvironment* env = first_false_block_->last_environment();
    HBasicBlock* split_edge =
        builder_->CreateBasicBlock(env->Copy());
    if (did_or_) {
      compare->SetSuccessorAt(0, split_edge);
      compare->SetSuccessorAt(1, first_false_block_);
    } else {
      compare->SetSuccessorAt(0, first_true_block_);
      compare->SetSuccessorAt(1, split_edge);
    }
    builder_->GotoNoSimulate(split_edge, split_edge_merge_block_);
  } else {
    compare->SetSuccessorAt(0, first_true_block_);
    compare->SetSuccessorAt(1, first_false_block_);
  }
  builder_->FinishCurrentBlock(compare);
  needs_compare_ = false;
  return compare;
}


void HGraphBuilder::IfBuilder::Or() {
  ASSERT(!needs_compare_);
  ASSERT(!did_and_);
  did_or_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ =
        builder_->CreateBasicBlock(env->Copy());
    builder_->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
    first_true_block_ = split_edge_merge_block_;
  }
  builder_->set_current_block(first_false_block_);
  first_false_block_ = builder_->CreateBasicBlock(env->Copy());
}


void HGraphBuilder::IfBuilder::And() {
  ASSERT(!needs_compare_);
  ASSERT(!did_or_);
  did_and_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder_->CreateBasicBlock(env->Copy());
    builder_->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
    first_false_block_ = split_edge_merge_block_;
  }
  builder_->set_current_block(first_true_block_);
  first_true_block_ = builder_->CreateBasicBlock(env->Copy());
}


void HGraphBuilder::IfBuilder::CaptureContinuation(
    HIfContinuation* continuation) {
  ASSERT(!did_else_if_);
  ASSERT(!finished_);
  ASSERT(!captured_);

  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  ASSERT(true_block != NULL);
  ASSERT(false_block != NULL);
  continuation->Capture(true_block, false_block);
  captured_ = true;
  builder_->set_current_block(NULL);
  End();
}


void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
  ASSERT(!did_else_if_);
  ASSERT(!finished_);
  ASSERT(!captured_);
  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  merge_at_join_blocks_ = NULL;
  if (true_block != NULL && !true_block->IsFinished()) {
    ASSERT(continuation->IsTrueReachable());
    builder_->GotoNoSimulate(true_block, continuation->true_branch());
  }
  if (false_block != NULL && !false_block->IsFinished()) {
    ASSERT(continuation->IsFalseReachable());
    builder_->GotoNoSimulate(false_block, continuation->false_branch());
  }
  captured_ = true;
  End();
}


void HGraphBuilder::IfBuilder::Then() {
  ASSERT(!captured_);
  ASSERT(!finished_);
  did_then_ = true;
  if (needs_compare_) {
    // Handle 'if' constructs without any expressions; they jump directly to
    // the "else" branch. However, we must pretend that the "then" branch is
    // reachable, so that the graph builder visits it and sees any live range
    // extending constructs within it.
    HConstant* constant_false = builder_->graph()->GetConstantFalse();
    ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
    boolean_type.Add(ToBooleanStub::BOOLEAN);
    HBranch* branch = builder()->New<HBranch>(
        constant_false, boolean_type, first_true_block_, first_false_block_);
    builder_->FinishCurrentBlock(branch);
  }
  builder_->set_current_block(first_true_block_);
  pending_merge_block_ = true;
}


void HGraphBuilder::IfBuilder::Else() {
  ASSERT(did_then_);
  ASSERT(!captured_);
  ASSERT(!finished_);
  AddMergeAtJoinBlock(false);
  builder_->set_current_block(first_false_block_);
  pending_merge_block_ = true;
  did_else_ = true;
}


void HGraphBuilder::IfBuilder::Deopt(const char* reason) {
  ASSERT(did_then_);
  builder_->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  AddMergeAtJoinBlock(true);
}


void HGraphBuilder::IfBuilder::Return(HValue* value) {
  HValue* parameter_count = builder_->graph()->GetConstantMinus1();
  builder_->FinishExitCurrentBlock(
      builder_->New<HReturn>(value, parameter_count));
  AddMergeAtJoinBlock(false);
}


void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
  if (!pending_merge_block_) return;
  HBasicBlock* block = builder_->current_block();
  ASSERT(block == NULL || !block->IsFinished());
  MergeAtJoinBlock* record =
      new(builder_->zone()) MergeAtJoinBlock(block, deopt,
                                             merge_at_join_blocks_);
  merge_at_join_blocks_ = record;
  if (block != NULL) {
    ASSERT(block->end() == NULL);
    if (deopt) {
      normal_merge_at_join_block_count_++;
    } else {
      deopt_merge_at_join_block_count_++;
    }
  }
  builder_->set_current_block(NULL);
  pending_merge_block_ = false;
}


void HGraphBuilder::IfBuilder::Finish() {
  ASSERT(!finished_);
  if (!did_then_) {
    Then();
  }
  AddMergeAtJoinBlock(false);
  if (!did_else_) {
    Else();
    AddMergeAtJoinBlock(false);
  }
  finished_ = true;
}


void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
                                      HBasicBlock** else_continuation) {
  Finish();

  MergeAtJoinBlock* else_record = merge_at_join_blocks_;
  if (else_continuation != NULL) {
    *else_continuation = else_record->block_;
  }
  MergeAtJoinBlock* then_record = else_record->next_;
  if (then_continuation != NULL) {
    *then_continuation = then_record->block_;
  }
  ASSERT(then_record->next_ == NULL);
}


void HGraphBuilder::IfBuilder::End() {
  if (captured_) return;
  Finish();

  int total_merged_blocks = normal_merge_at_join_block_count_ +
      deopt_merge_at_join_block_count_;
  ASSERT(total_merged_blocks >= 1);
  HBasicBlock* merge_block = total_merged_blocks == 1
      ? NULL : builder_->graph()->CreateBasicBlock();

  // Merge non-deopt blocks first to ensure the environment has the right
  // size for padding.
  MergeAtJoinBlock* current = merge_at_join_blocks_;
  while (current != NULL) {
    if (!current->deopt_ && current->block_ != NULL) {
      // If there is only one block that makes it through to the end of the
      // if, then just set it as the current block and continue rather than
      // creating an unnecessary merge block.
      if (total_merged_blocks == 1) {
        builder_->set_current_block(current->block_);
        return;
      }
      builder_->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }

  // Merge deopt blocks, padding when necessary.
  current = merge_at_join_blocks_;
  while (current != NULL) {
    if (current->deopt_ && current->block_ != NULL) {
      builder_->PadEnvironmentForContinuation(current->block_,
                                              merge_block);
      builder_->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }
  builder_->set_current_block(merge_block);
}
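
// Typical LoopBuilder usage, sketched from call sites later in this file
// (e.g. BuildCopySeqStringChars):
//
//   LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
//   HValue* index = loop.BeginBody(graph()->GetConstant0(), length,
//                                  Token::LT);
//   {
//     ...  // loop body; may use index and call loop.Break()
//   }
//   loop.EndBody();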
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
                                        HValue* context,
                                        LoopBuilder::Direction direction)
    : builder_(builder),
      context_(context),
      direction_(direction),
      finished_(false) {
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
  increment_amount_ = builder_->graph()->GetConstant1();
}


HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
                                        HValue* context,
                                        LoopBuilder::Direction direction,
                                        HValue* increment_amount)
    : builder_(builder),
      context_(context),
      direction_(direction),
      finished_(false) {
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
  increment_amount_ = increment_amount;
}


HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  env->Push(initial);
  builder_->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  // Remove the phi from the expression stack.
  body_env->Pop();
  exit_env->Pop();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);

  builder_->set_current_block(header_block_);
  env->Pop();
  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
      phi_, terminating, token, body_block_, exit_block_));

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, one);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}


void HGraphBuilder::LoopBuilder::Break() {
  if (exit_trampoline_block_ == NULL) {
    // It's the first time we see a break.
    HEnvironment* env = exit_block_->last_environment()->Copy();
    exit_trampoline_block_ = builder_->CreateBasicBlock(env);
    builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
  }

  builder_->GotoNoSimulate(exit_trampoline_block_);
  builder_->set_current_block(NULL);
}


void HGraphBuilder::LoopBuilder::EndBody() {
  ASSERT(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    if (direction_ == kPostIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, increment_amount_);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, increment_amount_);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  // Push the new increment value on the expression stack to merge into
  // the phi.
  builder_->environment()->Push(increment_);
  HBasicBlock* last_block = builder_->current_block();
  builder_->GotoNoSimulate(last_block, header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  if (exit_trampoline_block_ != NULL) {
    builder_->set_current_block(exit_trampoline_block_);
  } else {
    builder_->set_current_block(exit_block_);
  }
  finished_ = true;
}


HGraph* HGraphBuilder::CreateGraph() {
  graph_ = new(zone()) HGraph(info_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueness();
  return graph_;
}


HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  ASSERT(current_block() != NULL);
  ASSERT(!FLAG_emit_opt_code_positions ||
         position_ != RelocInfo::kNoPosition || !info_->IsOptimizing());
  current_block()->AddInstruction(instr, position_);
  if (graph()->IsInsideNoSideEffectsScope()) {
    instr->SetFlag(HValue::kHasNoObservableSideEffects);
  }
  return instr;
}


void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
  ASSERT(!FLAG_emit_opt_code_positions || !info_->IsOptimizing() ||
         position_ != RelocInfo::kNoPosition);
  current_block()->Finish(last, position_);
  if (last->IsReturn() || last->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}


void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
  ASSERT(!FLAG_emit_opt_code_positions || !info_->IsOptimizing() ||
         position_ != RelocInfo::kNoPosition);
  current_block()->FinishExit(instruction, position_);
  if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}


void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    HValue* reference = Add<HConstant>(ExternalReference(counter));
    HValue* old_value = Add<HLoadNamedField>(reference,
                                             HObjectAccess::ForCounter());
    HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow.
    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
                          new_value);
  }
}


void HGraphBuilder::AddSimulate(BailoutId id,
                                RemovableSimulate removable) {
  ASSERT(current_block() != NULL);
  ASSERT(!graph()->IsInsideNoSideEffectsScope());
  current_block()->AddNewSimulate(id, removable);
}


HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
  HBasicBlock* b = graph()->CreateBasicBlock();
  b->SetInitialEnvironment(env);
  return b;
}


HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
  HBasicBlock* header = graph()->CreateBasicBlock();
  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
  header->SetInitialEnvironment(entry_env);
  header->AttachLoopInformation();
  return header;
}


HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
  if (obj->type().IsHeapObject()) return obj;
  return Add<HCheckHeapObject>(obj);
}


void HGraphBuilder::FinishExitWithHardDeoptimization(
    const char* reason, HBasicBlock* continuation) {
  PadEnvironmentForContinuation(current_block(), continuation);
  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  if (graph()->IsInsideNoSideEffectsScope()) {
    GotoNoSimulate(continuation);
  } else {
    Goto(continuation);
  }
}


void HGraphBuilder::PadEnvironmentForContinuation(
    HBasicBlock* from,
    HBasicBlock* continuation) {
  if (continuation->last_environment() != NULL) {
    // When merging from a deopt block to a continuation, resolve differences
    // in environment by pushing constant 0 and popping extra values so that
    // the environments match during the join. Push 0 since it has the most
    // specific representation, and will not influence representation
    // inference of the phi.
    int continuation_env_length = continuation->last_environment()->length();
    while (continuation_env_length != from->last_environment()->length()) {
      if (continuation_env_length > from->last_environment()->length()) {
        from->last_environment()->Push(graph()->GetConstant0());
      } else {
        from->last_environment()->Pop();
      }
    }
  } else {
    ASSERT(continuation->predecessors()->length() == 0);
  }
}


HValue* HGraphBuilder::BuildCheckMap(HValue* obj, Handle<Map> map) {
  return Add<HCheckMaps>(obj, map, top_info());
}


HValue* HGraphBuilder::BuildCheckString(HValue* string) {
  if (!string->type().IsString()) {
    ASSERT(!string->IsConstant() ||
           !HConstant::cast(string)->HasStringValue());
    BuildCheckHeapObject(string);
    return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
  }
  return string;
}


HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
  if (object->type().IsJSObject()) return object;
  return Add<HWrapReceiver>(object, function);
}
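
// Grow-on-store helper (a descriptive note, not from the original source):
// when a keyed store hits the length boundary (key == length for packed
// kinds, key >= length for holey kinds), the elements backing store is
// grown, provided the key stays within JSObject::kMaxGap of the current
// capacity; anything further out deoptimizes. For JSArrays the length field
// is also updated to key + 1. Otherwise a plain bounds check is emitted.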
HValue* HGraphBuilder::BuildCheckForCapacityGrow(HValue* object,
                                                 HValue* elements,
                                                 ElementsKind kind,
                                                 HValue* length,
                                                 HValue* key,
                                                 bool is_js_array) {
  IfBuilder length_checker(this);

  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
  length_checker.If<HCompareNumericAndBranch>(key, length, token);

  length_checker.Then();

  HValue* current_capacity = AddLoadFixedArrayLength(elements);

  IfBuilder capacity_checker(this);

  capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
                                                Token::GTE);
  capacity_checker.Then();

  HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
  HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap);
  IfBuilder key_checker(this);
  key_checker.If<HCompareNumericAndBranch>(key, max_capacity, Token::LT);
  key_checker.Then();
  key_checker.ElseDeopt("Key out of capacity range");
  key_checker.End();

  HValue* new_capacity = BuildNewElementsCapacity(key);
  HValue* new_elements = BuildGrowElementsCapacity(object, elements,
                                                   kind, kind, length,
                                                   new_capacity);

  environment()->Push(new_elements);
  capacity_checker.Else();

  environment()->Push(elements);
  capacity_checker.End();

  if (is_js_array) {
    HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
    new_length->ClearFlag(HValue::kCanOverflow);

    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
                          new_length);
  }

  length_checker.Else();
  Add<HBoundsCheck>(key, length);

  environment()->Push(elements);
  length_checker.End();

  return environment()->Pop();
}


HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
                                                HValue* elements,
                                                ElementsKind kind,
                                                HValue* length) {
  Factory* factory = isolate()->factory();

  IfBuilder cow_checker(this);

  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  cow_checker.Then();

  HValue* capacity = AddLoadFixedArrayLength(elements);

  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
                                                   kind, length, capacity);

  environment()->Push(new_elements);

  cow_checker.Else();

  environment()->Push(elements);

  cow_checker.End();

  return environment()->Pop();
}


void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
                                                HValue* map,
                                                ElementsKind from_kind,
                                                ElementsKind to_kind,
                                                bool is_jsarray) {
  ASSERT(!IsFastHoleyElementsKind(from_kind) ||
         IsFastHoleyElementsKind(to_kind));

  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    Add<HTrapAllocationMemento>(object);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    HInstruction* elements = AddLoadElements(object);

    HInstruction* empty_fixed_array = Add<HConstant>(
        isolate()->factory()->empty_fixed_array());

    IfBuilder if_builder(this);

    if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);

    if_builder.Then();

    HInstruction* elements_length = AddLoadFixedArrayLength(elements);

    HInstruction* array_length = is_jsarray
        ? Add<HLoadNamedField>(object, HObjectAccess::ForArrayLength(from_kind))
        : elements_length;

    BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
                              array_length, elements_length);

    if_builder.End();
  }

  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
}
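
// A note on the lookup below (added for orientation; the probing constants
// live in SeededNumberDictionary): the dictionary is an open-addressed hash
// table, so a lookup probes up to kNumberDictionaryProbes entries, starting
// at hash & mask and stepping by GetProbeOffset(i) on each collision. Each
// entry occupies kEntrySize consecutive slots: key, value, and details.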
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoadHelper(
    HValue* elements,
    HValue* key,
    HValue* hash,
    HValue* mask,
    int current_probe) {
  if (current_probe == kNumberDictionaryProbes) {
    return NULL;
  }

  int32_t offset = SeededNumberDictionary::GetProbeOffset(current_probe);
  HValue* raw_index = (current_probe == 0)
      ? hash
      : AddUncasted<HAdd>(hash, Add<HConstant>(offset));
  raw_index = AddUncasted<HBitwise>(Token::BIT_AND, raw_index, mask);
  int32_t entry_size = SeededNumberDictionary::kEntrySize;
  raw_index = AddUncasted<HMul>(raw_index, Add<HConstant>(entry_size));
  raw_index->ClearFlag(HValue::kCanOverflow);

  int32_t base_offset = SeededNumberDictionary::kElementsStartIndex;
  HValue* key_index =
      AddUncasted<HAdd>(raw_index, Add<HConstant>(base_offset));
  key_index->ClearFlag(HValue::kCanOverflow);

  HValue* candidate_key = Add<HLoadKeyed>(elements, key_index,
                                          static_cast<HValue*>(NULL),
                                          FAST_SMI_ELEMENTS);

  IfBuilder key_compare(this);
  key_compare.IfNot<HCompareObjectEqAndBranch>(key, candidate_key);
  key_compare.Then();
  {
    // Key at the current probe doesn't match, try at the next probe.
    HValue* result = BuildUncheckedDictionaryElementLoadHelper(
        elements, key, hash, mask, current_probe + 1);
    if (result == NULL) {
      key_compare.Deopt("probes exhausted in keyed load dictionary lookup");
      result = graph()->GetConstantUndefined();
    } else {
      Push(result);
    }
  }
  key_compare.Else();
  {
    // Key at the current probe matches. Details must be zero, otherwise the
    // dictionary element requires special handling.
    HValue* details_index = AddUncasted<HAdd>(
        raw_index, Add<HConstant>(base_offset + 2));
    details_index->ClearFlag(HValue::kCanOverflow);

    HValue* details = Add<HLoadKeyed>(elements, details_index,
                                      static_cast<HValue*>(NULL),
                                      FAST_SMI_ELEMENTS);
    IfBuilder details_compare(this);
    details_compare.If<HCompareNumericAndBranch>(details,
                                                 graph()->GetConstant0(),
                                                 Token::NE);
    details_compare.ThenDeopt("keyed load dictionary element not fast case");

    details_compare.Else();
    {
      // Key matches and details are zero --> fast case. Load and return the
      // value.
      HValue* result_index = AddUncasted<HAdd>(
          raw_index, Add<HConstant>(base_offset + 1));
      result_index->ClearFlag(HValue::kCanOverflow);

      Push(Add<HLoadKeyed>(elements, result_index,
                           static_cast<HValue*>(NULL),
                           FAST_ELEMENTS));
    }
    details_compare.End();
  }
  key_compare.End();

  return Pop();
}
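
// The bit-mixing sequence below is a seeded variant of Thomas Wang's 32-bit
// integer hash. It must produce the same values as the hash used when the
// entries were inserted into the dictionary (presumably ComputeIntegerHash
// on the runtime side), or lookups would probe the wrong entries.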
HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
  int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
  HValue* seed = Add<HConstant>(seed_value);
  HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);

  // hash = ~hash + (hash << 15);
  HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
  HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
                                           graph()->GetConstantMinus1());
  hash = AddUncasted<HAdd>(shifted_hash, not_hash);

  // hash = hash ^ (hash >> 12);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash + (hash << 2);
  shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
  hash = AddUncasted<HAdd>(hash, shifted_hash);

  // hash = hash ^ (hash >> 4);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash * 2057;
  hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
  hash->ClearFlag(HValue::kCanOverflow);

  // hash = hash ^ (hash >> 16);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
  return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
}


HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
                                                           HValue* key) {
  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  HValue* capacity = Add<HLoadKeyed>(
      elements,
      Add<HConstant>(NameDictionary::kCapacityIndex),
      static_cast<HValue*>(NULL),
      FAST_SMI_ELEMENTS);

  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
  mask->ChangeRepresentation(Representation::Integer32());
  mask->ClearFlag(HValue::kCanOverflow);

  return BuildUncheckedDictionaryElementLoadHelper(elements, key,
                                                   hash, mask, 0);
}
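
// Layout of the number string cache used below (a reader's summary): the
// cache is a fixed array of (number, string) pairs, so a backing store of
// length 2 * N holds N entries and the hash mask is N - 1; hence the mask
// is computed as (length >> 1) - 1. A number hashes to one entry; a hit
// means the adjacent slot holds its cached string representation.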
HValue* HGraphBuilder::BuildNumberToString(HValue* object,
                                           Handle<Type> type) {
  NoObservableSideEffectsScope scope(this);

  // Convert constant numbers at compile time.
  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
    Handle<Object> number = HConstant::cast(object)->handle(isolate());
    Handle<String> result = isolate()->factory()->NumberToString(number);
    return Add<HConstant>(result);
  }

  // Create a joinable continuation.
  HIfContinuation found(graph()->CreateBasicBlock(),
                        graph()->CreateBasicBlock());

  // Load the number string cache.
  HValue* number_string_cache =
      Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  HValue* mask = AddLoadFixedArrayLength(number_string_cache);
  mask->set_type(HType::Smi());
  mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
  mask = AddUncasted<HSub>(mask, graph()->GetConstant1());

  // Check whether object is a smi.
  IfBuilder if_objectissmi(this);
  if_objectissmi.If<HIsSmiAndBranch>(object);
  if_objectissmi.Then();
  {
    // Compute hash for smi similar to smi_get_hash().
    HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);

    // Load the key.
    HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
    HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
                                  static_cast<HValue*>(NULL),
                                  FAST_ELEMENTS, ALLOW_RETURN_HOLE);

    // Check if object == key.
    IfBuilder if_objectiskey(this);
    if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
    if_objectiskey.Then();
    {
      // Make the key_index available.
      Push(key_index);
    }
    if_objectiskey.JoinContinuation(&found);
  }
  if_objectissmi.Else();
  {
    if (type->Is(Type::Smi())) {
      if_objectissmi.Deopt("Expected smi");
    } else {
      // Check if the object is a heap number.
      IfBuilder if_objectisnumber(this);
      if_objectisnumber.If<HCompareMap>(
          object, isolate()->factory()->heap_number_map());
      if_objectisnumber.Then();
      {
        // Compute hash for heap number similar to double_get_hash().
        HValue* low = Add<HLoadNamedField>(
            object, HObjectAccess::ForHeapNumberValueLowestBits());
        HValue* high = Add<HLoadNamedField>(
            object, HObjectAccess::ForHeapNumberValueHighestBits());
        HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
        hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);

        // Load the key.
        HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
        HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
                                      static_cast<HValue*>(NULL),
                                      FAST_ELEMENTS, ALLOW_RETURN_HOLE);

        // Check if key is a heap number (the number string cache contains
        // only smis and heap numbers, so it is sufficient to do a smi check
        // here).
        IfBuilder if_keyisnotsmi(this);
        if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
        if_keyisnotsmi.Then();
        {
          // Check if values of key and object match.
          IfBuilder if_keyeqobject(this);
          if_keyeqobject.If<HCompareNumericAndBranch>(
              Add<HLoadNamedField>(key, HObjectAccess::ForHeapNumberValue()),
              Add<HLoadNamedField>(object,
                                   HObjectAccess::ForHeapNumberValue()),
              Token::EQ);
          if_keyeqobject.Then();
          {
            // Make the key_index available.
            Push(key_index);
          }
          if_keyeqobject.JoinContinuation(&found);
        }
        if_keyisnotsmi.JoinContinuation(&found);
      }
      if_objectisnumber.Else();
      {
        if (type->Is(Type::Number())) {
          if_objectisnumber.Deopt("Expected heap number");
        }
      }
      if_objectisnumber.JoinContinuation(&found);
    }
  }
  if_objectissmi.JoinContinuation(&found);

  // Check for cache hit.
  IfBuilder if_found(this, &found);
  if_found.Then();
  {
    // Count number to string operation in native code.
    AddIncrementCounter(isolate()->counters()->number_to_string_native());

    // Load the value in case of cache hit.
    HValue* key_index = Pop();
    HValue* value_index =
        AddUncasted<HAdd>(key_index, graph()->GetConstant1());
    Push(Add<HLoadKeyed>(number_string_cache, value_index,
                         static_cast<HValue*>(NULL),
                         FAST_ELEMENTS, ALLOW_RETURN_HOLE));
  }
  if_found.Else();
  {
    // Cache miss, fall back to the runtime.
    Add<HPushArgument>(object);
    Push(Add<HCallRuntime>(
        isolate()->factory()->empty_string(),
        Runtime::FunctionForId(Runtime::kNumberToStringSkipCache),
        1));
  }
  if_found.End();

  return Pop();
}
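
// The size computation below rounds the byte count up to the object
// alignment using the usual (x + mask) & ~mask trick. For example (an
// illustration, not from the original source), on a 32-bit target with
// 4-byte object alignment, a 5-character one-byte string needs
// (5 + SeqString::kHeaderSize + 3) & ~3 bytes, i.e. the header plus five
// data bytes, rounded up to the next multiple of 4.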
1753 HLoadNamedField* left_instance_type = Add<HLoadNamedField>( 1754 Add<HLoadNamedField>(left, HObjectAccess::ForMap()), 1755 HObjectAccess::ForMapInstanceType()); 1756 HLoadNamedField* right_instance_type = Add<HLoadNamedField>( 1757 Add<HLoadNamedField>(right, HObjectAccess::ForMap()), 1758 HObjectAccess::ForMapInstanceType()); 1759 1760 // Compute difference of instance types. 1761 HValue* xored_instance_types = AddUncasted<HBitwise>( 1762 Token::BIT_XOR, left_instance_type, right_instance_type); 1763 1764 // Check if we should create a cons string. 1765 IfBuilder if_createcons(this); 1766 if_createcons.If<HCompareNumericAndBranch>( 1767 length, Add<HConstant>(ConsString::kMinLength), Token::GTE); 1768 if_createcons.Then(); 1769 { 1770 // Allocate the cons string object. HAllocate does not care whether we 1771 // pass CONS_STRING_TYPE or CONS_ASCII_STRING_TYPE here, so we just use 1772 // CONS_STRING_TYPE here. Below we decide whether the cons string is 1773 // one-byte or two-byte and set the appropriate map. 1774 HAllocate* string = Add<HAllocate>(Add<HConstant>(ConsString::kSize), 1775 HType::String(), pretenure_flag, 1776 CONS_STRING_TYPE); 1777 1778 // Compute the intersection of instance types. 1779 HValue* anded_instance_types = AddUncasted<HBitwise>( 1780 Token::BIT_AND, left_instance_type, right_instance_type); 1781 1782 // We create a one-byte cons string if 1783 // 1. both strings are one-byte, or 1784 // 2. at least one of the strings is two-byte, but happens to contain only 1785 // one-byte characters. 1786 // To do this, we check 1787 // 1. if both strings are one-byte, or if the one-byte data hint is set in 1788 // both strings, or 1789 // 2. if one of the strings has the one-byte data hint set and the other 1790 // string is one-byte. 1791 IfBuilder if_onebyte(this); 1792 STATIC_ASSERT(kOneByteStringTag != 0); 1793 STATIC_ASSERT(kOneByteDataHintMask != 0); 1794 if_onebyte.If<HCompareNumericAndBranch>( 1795 AddUncasted<HBitwise>( 1796 Token::BIT_AND, anded_instance_types, 1797 Add<HConstant>(static_cast<int32_t>( 1798 kStringEncodingMask | kOneByteDataHintMask))), 1799 graph()->GetConstant0(), Token::NE); 1800 if_onebyte.Or(); 1801 STATIC_ASSERT(kOneByteStringTag != 0 && 1802 kOneByteDataHintTag != 0 && 1803 kOneByteDataHintTag != kOneByteStringTag); 1804 if_onebyte.If<HCompareNumericAndBranch>( 1805 AddUncasted<HBitwise>( 1806 Token::BIT_AND, xored_instance_types, 1807 Add<HConstant>(static_cast<int32_t>( 1808 kOneByteStringTag | kOneByteDataHintTag))), 1809 Add<HConstant>(static_cast<int32_t>( 1810 kOneByteStringTag | kOneByteDataHintTag)), Token::EQ); 1811 if_onebyte.Then(); 1812 { 1813 // We can safely skip the write barrier for storing the map here. 1814 Handle<Map> map = isolate()->factory()->cons_ascii_string_map(); 1815 AddStoreMapConstantNoWriteBarrier(string, map); 1816 } 1817 if_onebyte.Else(); 1818 { 1819 // We can safely skip the write barrier for storing the map here. 1820 Handle<Map> map = isolate()->factory()->cons_string_map(); 1821 AddStoreMapConstantNoWriteBarrier(string, map); 1822 } 1823 if_onebyte.End(); 1824 1825 // Initialize the cons string fields. 
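// A cons string defers the actual copy: it merely records the two halves
// (first and second) plus the combined length, and is only flattened into
// a sequential string later, if and when that becomes necessary.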
1826 Add<HStoreNamedField>(string, HObjectAccess::ForStringHashField(), 1827 Add<HConstant>(String::kEmptyHashField)); 1828 Add<HStoreNamedField>(string, HObjectAccess::ForStringLength(), length); 1829 Add<HStoreNamedField>(string, HObjectAccess::ForConsStringFirst(), left); 1830 Add<HStoreNamedField>(string, HObjectAccess::ForConsStringSecond(), 1831 right); 1832 1833 // Count the native string addition. 1834 AddIncrementCounter(isolate()->counters()->string_add_native()); 1835 1836 // Cons string is result. 1837 Push(string); 1838 } 1839 if_createcons.Else(); 1840 { 1841 // Compute union of instance types. 1842 HValue* ored_instance_types = AddUncasted<HBitwise>( 1843 Token::BIT_OR, left_instance_type, right_instance_type); 1844 1845 // Check if both strings have the same encoding and both are 1846 // sequential. 1847 IfBuilder if_sameencodingandsequential(this); 1848 if_sameencodingandsequential.If<HCompareNumericAndBranch>( 1849 AddUncasted<HBitwise>( 1850 Token::BIT_AND, xored_instance_types, 1851 Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))), 1852 graph()->GetConstant0(), Token::EQ); 1853 if_sameencodingandsequential.And(); 1854 STATIC_ASSERT(kSeqStringTag == 0); 1855 if_sameencodingandsequential.If<HCompareNumericAndBranch>( 1856 AddUncasted<HBitwise>( 1857 Token::BIT_AND, ored_instance_types, 1858 Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))), 1859 graph()->GetConstant0(), Token::EQ); 1860 if_sameencodingandsequential.Then(); 1861 { 1862 // Check if the result is a one-byte string. 1863 IfBuilder if_onebyte(this); 1864 STATIC_ASSERT(kOneByteStringTag != 0); 1865 if_onebyte.If<HCompareNumericAndBranch>( 1866 AddUncasted<HBitwise>( 1867 Token::BIT_AND, ored_instance_types, 1868 Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))), 1869 graph()->GetConstant0(), Token::NE); 1870 if_onebyte.Then(); 1871 { 1872 // Calculate the number of bytes needed for the characters in the 1873 // string while observing object alignment. 1874 HValue* size = BuildSeqStringSizeFor( 1875 length, String::ONE_BYTE_ENCODING); 1876 1877 // Allocate the ASCII string object. 1878 Handle<Map> map = isolate()->factory()->ascii_string_map(); 1879 HAllocate* string = Add<HAllocate>(size, HType::String(), 1880 pretenure_flag, ASCII_STRING_TYPE); 1881 string->set_known_initial_map(map); 1882 1883 // We can safely skip the write barrier for storing map here. 1884 AddStoreMapConstantNoWriteBarrier(string, map); 1885 1886 // Length must be stored into the string before we copy characters to 1887 // make debug verification code happy. 1888 Add<HStoreNamedField>(string, HObjectAccess::ForStringLength(), 1889 length); 1890 1891 // Copy bytes from the left string. 1892 BuildCopySeqStringChars( 1893 left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, 1894 string, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, 1895 left_length); 1896 1897 // Copy bytes from the right string. 1898 BuildCopySeqStringChars( 1899 right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, 1900 string, left_length, String::ONE_BYTE_ENCODING, 1901 right_length); 1902 1903 // Count the native string addition. 1904 AddIncrementCounter(isolate()->counters()->string_add_native()); 1905 1906 // Return the string. 1907 Push(string); 1908 } 1909 if_onebyte.Else(); 1910 { 1911 // Calculate the number of bytes needed for the characters in the 1912 // string while observing object alignment. 
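// Same computation as in the one-byte branch above, except that the
// length is doubled first. As a sketch (assuming an 8-byte object
// alignment and a 16-byte SeqString header): length 3 yields 6 character
// bytes, and (6 + 16 + 7) & ~7 = 24 bytes are allocated.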
1913 HValue* size = BuildSeqStringSizeFor( 1914 length, String::TWO_BYTE_ENCODING); 1915 1916 // Allocate the two-byte string object. 1917 Handle<Map> map = isolate()->factory()->string_map(); 1918 HAllocate* string = Add<HAllocate>(size, HType::String(), 1919 pretenure_flag, STRING_TYPE); 1920 string->set_known_initial_map(map); 1921 1922 // We can safely skip the write barrier for storing map here. 1923 AddStoreMapConstantNoWriteBarrier(string, map); 1924 1925 // Length must be stored into the string before we copy characters to 1926 // make debug verification code happy. 1927 Add<HStoreNamedField>(string, HObjectAccess::ForStringLength(), 1928 length); 1929 1930 // Copy bytes from the left string. 1931 BuildCopySeqStringChars( 1932 left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, 1933 string, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, 1934 left_length); 1935 1936 // Copy bytes from the right string. 1937 BuildCopySeqStringChars( 1938 right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, 1939 string, left_length, String::TWO_BYTE_ENCODING, 1940 right_length); 1941 1942 // Return the string. 1943 Push(string); 1944 } 1945 if_onebyte.End(); 1946 1947 // Initialize the (common) string fields. 1948 HValue* string = Pop(); 1949 Add<HStoreNamedField>(string, HObjectAccess::ForStringHashField(), 1950 Add<HConstant>(String::kEmptyHashField)); 1951 1952 // Count the native string addition. 1953 AddIncrementCounter(isolate()->counters()->string_add_native()); 1954 1955 Push(string); 1956 } 1957 if_sameencodingandsequential.Else(); 1958 { 1959 // Fallback to the runtime to add the two strings. 1960 Add<HPushArgument>(left); 1961 Add<HPushArgument>(right); 1962 Push(Add<HCallRuntime>(isolate()->factory()->empty_string(), 1963 Runtime::FunctionForId(Runtime::kStringAdd), 1964 2)); 1965 } 1966 if_sameencodingandsequential.End(); 1967 } 1968 if_createcons.End(); 1969 1970 return Pop(); 1971 } 1972 1973 1974 HValue* HGraphBuilder::BuildStringAdd(HValue* left, 1975 HValue* right, 1976 PretenureFlag pretenure_flag) { 1977 // Determine the string lengths. 1978 HValue* left_length = Add<HLoadNamedField>( 1979 left, HObjectAccess::ForStringLength()); 1980 HValue* right_length = Add<HLoadNamedField>( 1981 right, HObjectAccess::ForStringLength()); 1982 1983 // Check if left string is empty. 1984 IfBuilder if_leftisempty(this); 1985 if_leftisempty.If<HCompareNumericAndBranch>( 1986 left_length, graph()->GetConstant0(), Token::EQ); 1987 if_leftisempty.Then(); 1988 { 1989 // Count the native string addition. 1990 AddIncrementCounter(isolate()->counters()->string_add_native()); 1991 1992 // Just return the right string. 1993 Push(right); 1994 } 1995 if_leftisempty.Else(); 1996 { 1997 // Check if right string is empty. 1998 IfBuilder if_rightisempty(this); 1999 if_rightisempty.If<HCompareNumericAndBranch>( 2000 right_length, graph()->GetConstant0(), Token::EQ); 2001 if_rightisempty.Then(); 2002 { 2003 // Count the native string addition. 2004 AddIncrementCounter(isolate()->counters()->string_add_native()); 2005 2006 // Just return the left string. 2007 Push(left); 2008 } 2009 if_rightisempty.Else(); 2010 { 2011 // Concatenate the two non-empty strings. 
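// Both operands are known to be non-empty at this point, so the combined
// length computed in BuildUncheckedStringAdd is at least two.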
2012 Push(BuildUncheckedStringAdd(left, right, pretenure_flag)); 2013 } 2014 if_rightisempty.End(); 2015 } 2016 if_leftisempty.End(); 2017 2018 return Pop(); 2019 } 2020 2021 2022 HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess( 2023 HValue* checked_object, 2024 HValue* key, 2025 HValue* val, 2026 bool is_js_array, 2027 ElementsKind elements_kind, 2028 bool is_store, 2029 LoadKeyedHoleMode load_mode, 2030 KeyedAccessStoreMode store_mode) { 2031 ASSERT(!IsExternalArrayElementsKind(elements_kind) || !is_js_array); 2032 // No GVNFlag is necessary for ElementsKind if there is an explicit dependency 2033 // on a HElementsTransition instruction. The flag can also be removed if the 2034 // map to check has FAST_HOLEY_ELEMENTS, since there can be no further 2035 // ElementsKind transitions. Finally, the dependency can be removed for stores 2036 // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the 2037 // generated store code. 2038 if ((elements_kind == FAST_HOLEY_ELEMENTS) || 2039 (elements_kind == FAST_ELEMENTS && is_store)) { 2040 checked_object->ClearGVNFlag(kDependsOnElementsKind); 2041 } 2042 2043 bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind); 2044 bool fast_elements = IsFastObjectElementsKind(elements_kind); 2045 HValue* elements = AddLoadElements(checked_object); 2046 if (is_store && (fast_elements || fast_smi_only_elements) && 2047 store_mode != STORE_NO_TRANSITION_HANDLE_COW) { 2048 HCheckMaps* check_cow_map = Add<HCheckMaps>( 2049 elements, isolate()->factory()->fixed_array_map(), top_info()); 2050 check_cow_map->ClearGVNFlag(kDependsOnElementsKind); 2051 } 2052 HInstruction* length = NULL; 2053 if (is_js_array) { 2054 length = Add<HLoadNamedField>( 2055 checked_object, HObjectAccess::ForArrayLength(elements_kind)); 2056 } else { 2057 length = AddLoadFixedArrayLength(elements); 2058 } 2059 length->set_type(HType::Smi()); 2060 HValue* checked_key = NULL; 2061 if (IsExternalArrayElementsKind(elements_kind)) { 2062 if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) { 2063 NoObservableSideEffectsScope no_effects(this); 2064 HLoadExternalArrayPointer* external_elements = 2065 Add<HLoadExternalArrayPointer>(elements); 2066 IfBuilder length_checker(this); 2067 length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT); 2068 length_checker.Then(); 2069 IfBuilder negative_checker(this); 2070 HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>( 2071 key, graph()->GetConstant0(), Token::GTE); 2072 negative_checker.Then(); 2073 HInstruction* result = AddElementAccess( 2074 external_elements, key, val, bounds_check, elements_kind, is_store); 2075 negative_checker.ElseDeopt("Negative key encountered"); 2076 negative_checker.End(); 2077 length_checker.End(); 2078 return result; 2079 } else { 2080 ASSERT(store_mode == STANDARD_STORE); 2081 checked_key = Add<HBoundsCheck>(key, length); 2082 HLoadExternalArrayPointer* external_elements = 2083 Add<HLoadExternalArrayPointer>(elements); 2084 return AddElementAccess( 2085 external_elements, checked_key, val, 2086 checked_object, elements_kind, is_store); 2087 } 2088 } 2089 ASSERT(fast_smi_only_elements || 2090 fast_elements || 2091 IsFastDoubleElementsKind(elements_kind)); 2092 2093 // In case val is stored into a fast smi array, assure that the value is a smi 2094 // before manipulating the backing store. Otherwise the actual store may 2095 // deopt, leaving the backing store in an invalid state. 
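// Forcing the smi representation up front moves any such deopt to this
// point, before the backing store has been touched.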
2096 if (is_store && IsFastSmiElementsKind(elements_kind) && 2097 !val->type().IsSmi()) { 2098 val = AddUncasted<HForceRepresentation>(val, Representation::Smi()); 2099 } 2100 2101 if (IsGrowStoreMode(store_mode)) { 2102 NoObservableSideEffectsScope no_effects(this); 2103 elements = BuildCheckForCapacityGrow(checked_object, elements, 2104 elements_kind, length, key, 2105 is_js_array); 2106 checked_key = key; 2107 } else { 2108 checked_key = Add<HBoundsCheck>(key, length); 2109 2110 if (is_store && (fast_elements || fast_smi_only_elements)) { 2111 if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) { 2112 NoObservableSideEffectsScope no_effects(this); 2113 elements = BuildCopyElementsOnWrite(checked_object, elements, 2114 elements_kind, length); 2115 } else { 2116 HCheckMaps* check_cow_map = Add<HCheckMaps>( 2117 elements, isolate()->factory()->fixed_array_map(), top_info()); 2118 check_cow_map->ClearGVNFlag(kDependsOnElementsKind); 2119 } 2120 } 2121 } 2122 return AddElementAccess(elements, checked_key, val, checked_object, 2123 elements_kind, is_store, load_mode); 2124 } 2125 2126 2127 2128 HValue* HGraphBuilder::BuildAllocateArrayFromLength( 2129 JSArrayBuilder* array_builder, 2130 HValue* length_argument) { 2131 if (length_argument->IsConstant() && 2132 HConstant::cast(length_argument)->HasSmiValue()) { 2133 int array_length = HConstant::cast(length_argument)->Integer32Value(); 2134 HValue* new_object = array_length == 0 2135 ? array_builder->AllocateEmptyArray() 2136 : array_builder->AllocateArray(length_argument, length_argument); 2137 return new_object; 2138 } 2139 2140 HValue* constant_zero = graph()->GetConstant0(); 2141 HConstant* max_alloc_length = 2142 Add<HConstant>(JSObject::kInitialMaxFastElementArray); 2143 HInstruction* checked_length = Add<HBoundsCheck>(length_argument, 2144 max_alloc_length); 2145 IfBuilder if_builder(this); 2146 if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero, 2147 Token::EQ); 2148 if_builder.Then(); 2149 const int initial_capacity = JSArray::kPreallocatedArrayElements; 2150 HConstant* initial_capacity_node = Add<HConstant>(initial_capacity); 2151 Push(initial_capacity_node); // capacity 2152 Push(constant_zero); // length 2153 if_builder.Else(); 2154 if (!(top_info()->IsStub()) && 2155 IsFastPackedElementsKind(array_builder->kind())) { 2156 // We'll come back later with better (holey) feedback. 
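// (Deopting here resumes in unoptimized code, which gives the allocation
// site a chance to record the holey elements kind before reoptimization.)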
2157 if_builder.Deopt("Holey array despite packed elements_kind feedback"); 2158 } else { 2159 Push(checked_length); // capacity 2160 Push(checked_length); // length 2161 } 2162 if_builder.End(); 2163 2164 // Figure out total size 2165 HValue* length = Pop(); 2166 HValue* capacity = Pop(); 2167 return array_builder->AllocateArray(capacity, length); 2168 } 2169 2170 HValue* HGraphBuilder::BuildAllocateElements(ElementsKind kind, 2171 HValue* capacity) { 2172 int elements_size; 2173 InstanceType instance_type; 2174 2175 if (IsFastDoubleElementsKind(kind)) { 2176 elements_size = kDoubleSize; 2177 instance_type = FIXED_DOUBLE_ARRAY_TYPE; 2178 } else { 2179 elements_size = kPointerSize; 2180 instance_type = FIXED_ARRAY_TYPE; 2181 } 2182 2183 HConstant* elements_size_value = Add<HConstant>(elements_size); 2184 HValue* mul = AddUncasted<HMul>(capacity, elements_size_value); 2185 mul->ClearFlag(HValue::kCanOverflow); 2186 2187 HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize); 2188 HValue* total_size = AddUncasted<HAdd>(mul, header_size); 2189 total_size->ClearFlag(HValue::kCanOverflow); 2190 2191 return Add<HAllocate>(total_size, HType::JSArray(), 2192 isolate()->heap()->GetPretenureMode(), instance_type); 2193 } 2194 2195 2196 void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements, 2197 ElementsKind kind, 2198 HValue* capacity) { 2199 Factory* factory = isolate()->factory(); 2200 Handle<Map> map = IsFastDoubleElementsKind(kind) 2201 ? factory->fixed_double_array_map() 2202 : factory->fixed_array_map(); 2203 2204 AddStoreMapConstant(elements, map); 2205 Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(), 2206 capacity); 2207 } 2208 2209 2210 HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader( 2211 ElementsKind kind, 2212 HValue* capacity) { 2213 // The HForceRepresentation is to prevent possible deopt on int-smi 2214 // conversion after allocation but before the new object fields are set. 
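// (A deopt between the allocation and the field stores below would leave
// behind a partially initialized object.)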
2215 capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi()); 2216 HValue* new_elements = BuildAllocateElements(kind, capacity); 2217 BuildInitializeElementsHeader(new_elements, kind, capacity); 2218 return new_elements; 2219 } 2220 2221 2222 HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array, 2223 HValue* array_map, 2224 AllocationSiteMode mode, 2225 ElementsKind elements_kind, 2226 HValue* allocation_site_payload, 2227 HValue* length_field) { 2228 2229 Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map); 2230 2231 HConstant* empty_fixed_array = 2232 Add<HConstant>(isolate()->factory()->empty_fixed_array()); 2233 2234 HObjectAccess access = HObjectAccess::ForPropertiesPointer(); 2235 Add<HStoreNamedField>(array, access, empty_fixed_array); 2236 Add<HStoreNamedField>(array, HObjectAccess::ForArrayLength(elements_kind), 2237 length_field); 2238 2239 if (mode == TRACK_ALLOCATION_SITE) { 2240 BuildCreateAllocationMemento( 2241 array, Add<HConstant>(JSArray::kSize), allocation_site_payload); 2242 } 2243 2244 int elements_location = JSArray::kSize; 2245 if (mode == TRACK_ALLOCATION_SITE) { 2246 elements_location += AllocationMemento::kSize; 2247 } 2248 2249 HInnerAllocatedObject* elements = Add<HInnerAllocatedObject>( 2250 array, Add<HConstant>(elements_location)); 2251 Add<HStoreNamedField>(array, HObjectAccess::ForElementsPointer(), elements); 2252 return elements; 2253 } 2254 2255 2256 HInstruction* HGraphBuilder::AddElementAccess( 2257 HValue* elements, 2258 HValue* checked_key, 2259 HValue* val, 2260 HValue* dependency, 2261 ElementsKind elements_kind, 2262 bool is_store, 2263 LoadKeyedHoleMode load_mode) { 2264 if (is_store) { 2265 ASSERT(val != NULL); 2266 if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) { 2267 val = Add<HClampToUint8>(val); 2268 } 2269 return Add<HStoreKeyed>(elements, checked_key, val, elements_kind); 2270 } 2271 2272 ASSERT(!is_store); 2273 ASSERT(val == NULL); 2274 HLoadKeyed* load = Add<HLoadKeyed>( 2275 elements, checked_key, dependency, elements_kind, load_mode); 2276 if (FLAG_opt_safe_uint32_operations && 2277 elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) { 2278 graph()->RecordUint32Instruction(load); 2279 } 2280 return load; 2281 } 2282 2283 2284 HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object) { 2285 return Add<HLoadNamedField>(object, HObjectAccess::ForElementsPointer()); 2286 } 2287 2288 2289 HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) { 2290 return Add<HLoadNamedField>(object, 2291 HObjectAccess::ForFixedArrayLength()); 2292 } 2293 2294 2295 HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) { 2296 HValue* half_old_capacity = AddUncasted<HShr>(old_capacity, 2297 graph_->GetConstant1()); 2298 2299 HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity); 2300 new_capacity->ClearFlag(HValue::kCanOverflow); 2301 2302 HValue* min_growth = Add<HConstant>(16); 2303 2304 new_capacity = AddUncasted<HAdd>(new_capacity, min_growth); 2305 new_capacity->ClearFlag(HValue::kCanOverflow); 2306 2307 return new_capacity; 2308 } 2309 2310 2311 void HGraphBuilder::BuildNewSpaceArrayCheck(HValue* length, ElementsKind kind) { 2312 Heap* heap = isolate()->heap(); 2313 int element_size = IsFastDoubleElementsKind(kind) ? 
kDoubleSize 2314 : kPointerSize; 2315 int max_size = heap->MaxRegularSpaceAllocationSize() / element_size; 2316 max_size -= JSArray::kSize / element_size; 2317 HConstant* max_size_constant = Add<HConstant>(max_size); 2318 Add<HBoundsCheck>(length, max_size_constant); 2319 } 2320 2321 2322 HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object, 2323 HValue* elements, 2324 ElementsKind kind, 2325 ElementsKind new_kind, 2326 HValue* length, 2327 HValue* new_capacity) { 2328 BuildNewSpaceArrayCheck(new_capacity, new_kind); 2329 2330 HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader( 2331 new_kind, new_capacity); 2332 2333 BuildCopyElements(elements, kind, 2334 new_elements, new_kind, 2335 length, new_capacity); 2336 2337 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(), 2338 new_elements); 2339 2340 return new_elements; 2341 } 2342 2343 2344 void HGraphBuilder::BuildFillElementsWithHole(HValue* elements, 2345 ElementsKind elements_kind, 2346 HValue* from, 2347 HValue* to) { 2348 // Fast elements kinds need to be initialized in case statements below cause 2349 // a garbage collection. 2350 Factory* factory = isolate()->factory(); 2351 2352 double nan_double = FixedDoubleArray::hole_nan_as_double(); 2353 HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind) 2354 ? Add<HConstant>(factory->the_hole_value()) 2355 : Add<HConstant>(nan_double); 2356 2357 // Special loop unfolding case 2358 static const int kLoopUnfoldLimit = 8; 2359 STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit); 2360 int initial_capacity = -1; 2361 if (from->IsInteger32Constant() && to->IsInteger32Constant()) { 2362 int constant_from = from->GetInteger32Constant(); 2363 int constant_to = to->GetInteger32Constant(); 2364 2365 if (constant_from == 0 && constant_to <= kLoopUnfoldLimit) { 2366 initial_capacity = constant_to; 2367 } 2368 } 2369 2370 // Since we're about to store a hole value, the store instruction below must 2371 // assume an elements kind that supports heap object values. 2372 if (IsFastSmiOrObjectElementsKind(elements_kind)) { 2373 elements_kind = FAST_HOLEY_ELEMENTS; 2374 } 2375 2376 if (initial_capacity >= 0) { 2377 for (int i = 0; i < initial_capacity; i++) { 2378 HInstruction* key = Add<HConstant>(i); 2379 Add<HStoreKeyed>(elements, key, hole, elements_kind); 2380 } 2381 } else { 2382 LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); 2383 2384 HValue* key = builder.BeginBody(from, to, Token::LT); 2385 2386 Add<HStoreKeyed>(elements, key, hole, elements_kind); 2387 2388 builder.EndBody(); 2389 } 2390 } 2391 2392 2393 void HGraphBuilder::BuildCopyElements(HValue* from_elements, 2394 ElementsKind from_elements_kind, 2395 HValue* to_elements, 2396 ElementsKind to_elements_kind, 2397 HValue* length, 2398 HValue* capacity) { 2399 bool pre_fill_with_holes = 2400 IsFastDoubleElementsKind(from_elements_kind) && 2401 IsFastObjectElementsKind(to_elements_kind); 2402 2403 if (pre_fill_with_holes) { 2404 // If the copy might trigger a GC, make sure that the FixedArray is 2405 // pre-initialized with holes to make sure that it's always in a consistent 2406 // state. 
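// Without the pre-fill, a GC triggered in mid-copy could scan the
// uninitialized tail of the new backing store and misinterpret stale bits
// as tagged pointers.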
2407 BuildFillElementsWithHole(to_elements, to_elements_kind, 2408 graph()->GetConstant0(), capacity); 2409 } 2410 2411 LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); 2412 2413 HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT); 2414 2415 HValue* element = Add<HLoadKeyed>(from_elements, key, 2416 static_cast<HValue*>(NULL), 2417 from_elements_kind, 2418 ALLOW_RETURN_HOLE); 2419 2420 ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) && 2421 IsFastSmiElementsKind(to_elements_kind)) 2422 ? FAST_HOLEY_ELEMENTS : to_elements_kind; 2423 2424 if (IsHoleyElementsKind(from_elements_kind) && 2425 from_elements_kind != to_elements_kind) { 2426 IfBuilder if_hole(this); 2427 if_hole.If<HCompareHoleAndBranch>(element); 2428 if_hole.Then(); 2429 HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind) 2430 ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double()) 2431 : graph()->GetConstantHole(); 2432 Add<HStoreKeyed>(to_elements, key, hole_constant, kind); 2433 if_hole.Else(); 2434 HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind); 2435 store->SetFlag(HValue::kAllowUndefinedAsNaN); 2436 if_hole.End(); 2437 } else { 2438 HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind); 2439 store->SetFlag(HValue::kAllowUndefinedAsNaN); 2440 } 2441 2442 builder.EndBody(); 2443 2444 if (!pre_fill_with_holes && length != capacity) { 2445 // Fill unused capacity with the hole. 2446 BuildFillElementsWithHole(to_elements, to_elements_kind, 2447 key, capacity); 2448 } 2449 } 2450 2451 2452 HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate, 2453 HValue* allocation_site, 2454 AllocationSiteMode mode, 2455 ElementsKind kind, 2456 int length) { 2457 NoObservableSideEffectsScope no_effects(this); 2458 2459 // All sizes here are multiples of kPointerSize. 2460 int size = JSArray::kSize; 2461 if (mode == TRACK_ALLOCATION_SITE) { 2462 size += AllocationMemento::kSize; 2463 } 2464 2465 HValue* size_in_bytes = Add<HConstant>(size); 2466 HInstruction* object = Add<HAllocate>(size_in_bytes, 2467 HType::JSObject(), 2468 NOT_TENURED, 2469 JS_OBJECT_TYPE); 2470 2471 // Copy the JS array part. 2472 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { 2473 if ((i != JSArray::kElementsOffset) || (length == 0)) { 2474 HObjectAccess access = HObjectAccess::ForJSArrayOffset(i); 2475 Add<HStoreNamedField>(object, access, 2476 Add<HLoadNamedField>(boilerplate, access)); 2477 } 2478 } 2479 2480 // Create an allocation site info if requested. 2481 if (mode == TRACK_ALLOCATION_SITE) { 2482 BuildCreateAllocationMemento( 2483 object, Add<HConstant>(JSArray::kSize), allocation_site); 2484 } 2485 2486 if (length > 0) { 2487 HValue* boilerplate_elements = AddLoadElements(boilerplate); 2488 HValue* object_elements; 2489 if (IsFastDoubleElementsKind(kind)) { 2490 HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length)); 2491 object_elements = Add<HAllocate>(elems_size, HType::JSArray(), 2492 NOT_TENURED, FIXED_DOUBLE_ARRAY_TYPE); 2493 } else { 2494 HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length)); 2495 object_elements = Add<HAllocate>(elems_size, HType::JSArray(), 2496 NOT_TENURED, FIXED_ARRAY_TYPE); 2497 } 2498 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(), 2499 object_elements); 2500 2501 // Copy the elements array header. 
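// The header covered by FixedArrayBase::kHeaderSize consists of the map
// pointer and the length field shared by all elements backing stores.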
2502 for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) { 2503 HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i); 2504 Add<HStoreNamedField>(object_elements, access, 2505 Add<HLoadNamedField>(boilerplate_elements, access)); 2506 } 2507 2508 // Copy the elements array contents. 2509 // TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold 2510 // copying loops with constant length up to a given boundary and use this 2511 // helper here instead. 2512 for (int i = 0; i < length; i++) { 2513 HValue* key_constant = Add<HConstant>(i); 2514 HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant, 2515 static_cast<HValue*>(NULL), kind); 2516 Add<HStoreKeyed>(object_elements, key_constant, value, kind); 2517 } 2518 } 2519 2520 return object; 2521 } 2522 2523 2524 void HGraphBuilder::BuildCompareNil( 2525 HValue* value, 2526 Handle<Type> type, 2527 HIfContinuation* continuation) { 2528 IfBuilder if_nil(this); 2529 bool some_case_handled = false; 2530 bool some_case_missing = false; 2531 2532 if (type->Maybe(Type::Null())) { 2533 if (some_case_handled) if_nil.Or(); 2534 if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull()); 2535 some_case_handled = true; 2536 } else { 2537 some_case_missing = true; 2538 } 2539 2540 if (type->Maybe(Type::Undefined())) { 2541 if (some_case_handled) if_nil.Or(); 2542 if_nil.If<HCompareObjectEqAndBranch>(value, 2543 graph()->GetConstantUndefined()); 2544 some_case_handled = true; 2545 } else { 2546 some_case_missing = true; 2547 } 2548 2549 if (type->Maybe(Type::Undetectable())) { 2550 if (some_case_handled) if_nil.Or(); 2551 if_nil.If<HIsUndetectableAndBranch>(value); 2552 some_case_handled = true; 2553 } else { 2554 some_case_missing = true; 2555 } 2556 2557 if (some_case_missing) { 2558 if_nil.Then(); 2559 if_nil.Else(); 2560 if (type->NumClasses() == 1) { 2561 BuildCheckHeapObject(value); 2562 // For ICs, the map checked below is a sentinel map that gets replaced by 2563 // the monomorphic map when the code is used as a template to generate a 2564 // new IC. For optimized functions, there is no sentinel map, the map 2565 // emitted below is the actual monomorphic map. 2566 BuildCheckMap(value, type->Classes().Current()); 2567 } else { 2568 if_nil.Deopt("Too many undetectable types"); 2569 } 2570 } 2571 2572 if_nil.CaptureContinuation(continuation); 2573 } 2574 2575 2576 void HGraphBuilder::BuildCreateAllocationMemento( 2577 HValue* previous_object, 2578 HValue* previous_object_size, 2579 HValue* allocation_site) { 2580 ASSERT(allocation_site != NULL); 2581 HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>( 2582 previous_object, previous_object_size); 2583 AddStoreMapConstant( 2584 allocation_memento, isolate()->factory()->allocation_memento_map()); 2585 Add<HStoreNamedField>( 2586 allocation_memento, 2587 HObjectAccess::ForAllocationMementoSite(), 2588 allocation_site); 2589 if (FLAG_allocation_site_pretenuring) { 2590 HValue* memento_create_count = Add<HLoadNamedField>( 2591 allocation_site, HObjectAccess::ForAllocationSiteOffset( 2592 AllocationSite::kMementoCreateCountOffset)); 2593 memento_create_count = AddUncasted<HAdd>( 2594 memento_create_count, graph()->GetConstant1()); 2595 HStoreNamedField* store = Add<HStoreNamedField>( 2596 allocation_site, HObjectAccess::ForAllocationSiteOffset( 2597 AllocationSite::kMementoCreateCountOffset), memento_create_count); 2598 // No write barrier needed to store a smi. 
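// (Smis are immediate values rather than heap pointers, so there is
// nothing for the write barrier to record.)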
2599 store->SkipWriteBarrier();
2600 }
2601 }
2602
2603
2604 HInstruction* HGraphBuilder::BuildGetNativeContext() {
2605 // Get the global object, then the native context.
2606 HInstruction* global_object = Add<HGlobalObject>();
2607 HObjectAccess access = HObjectAccess::ForJSObjectOffset(
2608 GlobalObject::kNativeContextOffset);
2609 return Add<HLoadNamedField>(global_object, access);
2610 }
2611
2612
2613 HInstruction* HGraphBuilder::BuildGetArrayFunction() {
2614 HInstruction* native_context = BuildGetNativeContext();
2615 HInstruction* index =
2616 Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
2617 return Add<HLoadKeyed>(
2618 native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
2619 }
2620
2621
2622 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
2623 ElementsKind kind,
2624 HValue* allocation_site_payload,
2625 HValue* constructor_function,
2626 AllocationSiteOverrideMode override_mode) :
2627 builder_(builder),
2628 kind_(kind),
2629 allocation_site_payload_(allocation_site_payload),
2630 constructor_function_(constructor_function) {
2631 mode_ = override_mode == DISABLE_ALLOCATION_SITES
2632 ? DONT_TRACK_ALLOCATION_SITE
2633 : AllocationSite::GetMode(kind);
2634 }
2635
2636
2637 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
2638 ElementsKind kind,
2639 HValue* constructor_function) :
2640 builder_(builder),
2641 kind_(kind),
2642 mode_(DONT_TRACK_ALLOCATION_SITE),
2643 allocation_site_payload_(NULL),
2644 constructor_function_(constructor_function) {
2645 }
2646
2647
2648 HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
2649 if (!builder()->top_info()->IsStub()) {
2650 // A constant map is fine.
2651 Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
2652 builder()->isolate());
2653 return builder()->Add<HConstant>(map);
2654 }
2655
2656 if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
2657 // No need for a context lookup if the kind_ matches the initial
2658 // map, because we can just load the map in that case.
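// That is, the initial map is read straight off the constructor function,
// exactly as EmitInternalMapCode() does below.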
2659 HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap(); 2660 return builder()->AddLoadNamedField(constructor_function_, access); 2661 } 2662 2663 HInstruction* native_context = builder()->BuildGetNativeContext(); 2664 HInstruction* index = builder()->Add<HConstant>( 2665 static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX)); 2666 2667 HInstruction* map_array = builder()->Add<HLoadKeyed>( 2668 native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS); 2669 2670 HInstruction* kind_index = builder()->Add<HConstant>(kind_); 2671 2672 return builder()->Add<HLoadKeyed>( 2673 map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS); 2674 } 2675 2676 2677 HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() { 2678 // Find the map near the constructor function 2679 HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap(); 2680 return builder()->AddLoadNamedField(constructor_function_, access); 2681 } 2682 2683 2684 HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize( 2685 HValue* length_node) { 2686 ASSERT(length_node != NULL); 2687 2688 int base_size = JSArray::kSize; 2689 if (mode_ == TRACK_ALLOCATION_SITE) { 2690 base_size += AllocationMemento::kSize; 2691 } 2692 2693 STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize); 2694 base_size += FixedArray::kHeaderSize; 2695 2696 HInstruction* elements_size_value = 2697 builder()->Add<HConstant>(elements_size()); 2698 HInstruction* mul = HMul::NewImul(builder()->zone(), builder()->context(), 2699 length_node, elements_size_value); 2700 builder()->AddInstruction(mul); 2701 HInstruction* base = builder()->Add<HConstant>(base_size); 2702 HInstruction* total_size = HAdd::New(builder()->zone(), builder()->context(), 2703 base, mul); 2704 total_size->ClearFlag(HValue::kCanOverflow); 2705 builder()->AddInstruction(total_size); 2706 return total_size; 2707 } 2708 2709 2710 HValue* HGraphBuilder::JSArrayBuilder::EstablishEmptyArrayAllocationSize() { 2711 int base_size = JSArray::kSize; 2712 if (mode_ == TRACK_ALLOCATION_SITE) { 2713 base_size += AllocationMemento::kSize; 2714 } 2715 2716 base_size += IsFastDoubleElementsKind(kind_) 2717 ? FixedDoubleArray::SizeFor(initial_capacity()) 2718 : FixedArray::SizeFor(initial_capacity()); 2719 2720 return builder()->Add<HConstant>(base_size); 2721 } 2722 2723 2724 HValue* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() { 2725 HValue* size_in_bytes = EstablishEmptyArrayAllocationSize(); 2726 HConstant* capacity = builder()->Add<HConstant>(initial_capacity()); 2727 return AllocateArray(size_in_bytes, 2728 capacity, 2729 builder()->graph()->GetConstant0()); 2730 } 2731 2732 2733 HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* capacity, 2734 HValue* length_field, 2735 FillMode fill_mode) { 2736 HValue* size_in_bytes = EstablishAllocationSize(capacity); 2737 return AllocateArray(size_in_bytes, capacity, length_field, fill_mode); 2738 } 2739 2740 2741 HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes, 2742 HValue* capacity, 2743 HValue* length_field, 2744 FillMode fill_mode) { 2745 // These HForceRepresentations are because we store these as fields in the 2746 // objects we construct, and an int32-to-smi HChange could deopt. Accept 2747 // the deopt possibility now, before allocation occurs. 
2748 capacity = 2749 builder()->AddUncasted<HForceRepresentation>(capacity, 2750 Representation::Smi()); 2751 length_field = 2752 builder()->AddUncasted<HForceRepresentation>(length_field, 2753 Representation::Smi()); 2754 // Allocate (dealing with failure appropriately) 2755 HAllocate* new_object = builder()->Add<HAllocate>(size_in_bytes, 2756 HType::JSArray(), NOT_TENURED, JS_ARRAY_TYPE); 2757 2758 // Folded array allocation should be aligned if it has fast double elements. 2759 if (IsFastDoubleElementsKind(kind_)) { 2760 new_object->MakeDoubleAligned(); 2761 } 2762 2763 // Fill in the fields: map, properties, length 2764 HValue* map; 2765 if (allocation_site_payload_ == NULL) { 2766 map = EmitInternalMapCode(); 2767 } else { 2768 map = EmitMapCode(); 2769 } 2770 elements_location_ = builder()->BuildJSArrayHeader(new_object, 2771 map, 2772 mode_, 2773 kind_, 2774 allocation_site_payload_, 2775 length_field); 2776 2777 // Initialize the elements 2778 builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity); 2779 2780 if (fill_mode == FILL_WITH_HOLE) { 2781 builder()->BuildFillElementsWithHole(elements_location_, kind_, 2782 graph()->GetConstant0(), capacity); 2783 } 2784 2785 return new_object; 2786 } 2787 2788 2789 HStoreNamedField* HGraphBuilder::AddStoreMapConstant(HValue *object, 2790 Handle<Map> map) { 2791 return Add<HStoreNamedField>(object, HObjectAccess::ForMap(), 2792 Add<HConstant>(map)); 2793 } 2794 2795 2796 HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin) { 2797 HGlobalObject* global_object = Add<HGlobalObject>(); 2798 HObjectAccess access = HObjectAccess::ForJSObjectOffset( 2799 GlobalObject::kBuiltinsOffset); 2800 HValue* builtins = Add<HLoadNamedField>(global_object, access); 2801 HObjectAccess function_access = HObjectAccess::ForJSObjectOffset( 2802 JSBuiltinsObject::OffsetOfFunctionWithId(builtin)); 2803 return Add<HLoadNamedField>(builtins, function_access); 2804 } 2805 2806 2807 HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info) 2808 : HGraphBuilder(info), 2809 function_state_(NULL), 2810 initial_function_state_(this, info, NORMAL_RETURN), 2811 ast_context_(NULL), 2812 break_scope_(NULL), 2813 inlined_count_(0), 2814 globals_(10, info->zone()), 2815 inline_bailout_(false), 2816 osr_(new(info->zone()) HOsrBuilder(this)) { 2817 // This is not initialized in the initializer list because the 2818 // constructor for the initial state relies on function_state_ == NULL 2819 // to know it's the initial state. 
2820 function_state_= &initial_function_state_; 2821 InitializeAstVisitor(info->isolate()); 2822 if (FLAG_emit_opt_code_positions) { 2823 SetSourcePosition(info->shared_info()->start_position()); 2824 } 2825 } 2826 2827 2828 HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first, 2829 HBasicBlock* second, 2830 BailoutId join_id) { 2831 if (first == NULL) { 2832 return second; 2833 } else if (second == NULL) { 2834 return first; 2835 } else { 2836 HBasicBlock* join_block = graph()->CreateBasicBlock(); 2837 Goto(first, join_block); 2838 Goto(second, join_block); 2839 join_block->SetJoinId(join_id); 2840 return join_block; 2841 } 2842 } 2843 2844 2845 HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement, 2846 HBasicBlock* exit_block, 2847 HBasicBlock* continue_block) { 2848 if (continue_block != NULL) { 2849 if (exit_block != NULL) Goto(exit_block, continue_block); 2850 continue_block->SetJoinId(statement->ContinueId()); 2851 return continue_block; 2852 } 2853 return exit_block; 2854 } 2855 2856 2857 HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement, 2858 HBasicBlock* loop_entry, 2859 HBasicBlock* body_exit, 2860 HBasicBlock* loop_successor, 2861 HBasicBlock* break_block) { 2862 if (body_exit != NULL) Goto(body_exit, loop_entry); 2863 loop_entry->PostProcessLoopHeader(statement); 2864 if (break_block != NULL) { 2865 if (loop_successor != NULL) Goto(loop_successor, break_block); 2866 break_block->SetJoinId(statement->ExitId()); 2867 return break_block; 2868 } 2869 return loop_successor; 2870 } 2871 2872 2873 // Build a new loop header block and set it as the current block. 2874 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() { 2875 HBasicBlock* loop_entry = CreateLoopHeaderBlock(); 2876 Goto(loop_entry); 2877 set_current_block(loop_entry); 2878 return loop_entry; 2879 } 2880 2881 2882 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry( 2883 IterationStatement* statement) { 2884 HBasicBlock* loop_entry = osr()->HasOsrEntryAt(statement) 2885 ? 
osr()->BuildOsrLoopEntry(statement)
2886 : BuildLoopEntry();
2887 return loop_entry;
2888 }
2889
2890
2891 void HBasicBlock::FinishExit(HControlInstruction* instruction, int position) {
2892 Finish(instruction, position);
2893 ClearEnvironment();
2894 }
2895
2896
2897 HGraph::HGraph(CompilationInfo* info)
2898 : isolate_(info->isolate()),
2899 next_block_id_(0),
2900 entry_block_(NULL),
2901 blocks_(8, info->zone()),
2902 values_(16, info->zone()),
2903 phi_list_(NULL),
2904 uint32_instructions_(NULL),
2905 osr_(NULL),
2906 info_(info),
2907 zone_(info->zone()),
2908 is_recursive_(false),
2909 use_optimistic_licm_(false),
2910 depends_on_empty_array_proto_elements_(false),
2911 type_change_checksum_(0),
2912 maximum_environment_size_(0),
2913 no_side_effects_scope_count_(0),
2914 disallow_adding_new_values_(false) {
2915 if (info->IsStub()) {
2916 HydrogenCodeStub* stub = info->code_stub();
2917 CodeStubInterfaceDescriptor* descriptor =
2918 stub->GetInterfaceDescriptor(isolate_);
2919 start_environment_ =
2920 new(zone_) HEnvironment(zone_, descriptor->environment_length());
2921 } else {
2922 start_environment_ =
2923 new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
2924 }
2925 start_environment_->set_ast_id(BailoutId::FunctionEntry());
2926 entry_block_ = CreateBasicBlock();
2927 entry_block_->SetInitialEnvironment(start_environment_);
2928 }
2929
2930
2931 HBasicBlock* HGraph::CreateBasicBlock() {
2932 HBasicBlock* result = new(zone()) HBasicBlock(this);
2933 blocks_.Add(result, zone());
2934 return result;
2935 }
2936
2937
2938 void HGraph::FinalizeUniqueness() {
2939 DisallowHeapAllocation no_gc;
2940 ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
2941 for (int i = 0; i < blocks()->length(); ++i) {
2942 for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
2943 it.Current()->FinalizeUniqueness();
2944 }
2945 }
2946 }
2947
2948
2949 // Block ordering was implemented with two mutually recursive methods,
2950 // HGraph::Postorder and HGraph::PostorderLoopBlocks.
2951 // The recursion could lead to stack overflow, so the algorithm has been
2952 // implemented iteratively.
2953 // At a high level the algorithm looks like this:
2954 //
2955 // Postorder(block, loop_header) : {
2956 //   if (block has already been visited or is of another loop) return;
2957 //   mark block as visited;
2958 //   if (block is a loop header) {
2959 //     VisitLoopMembers(block, loop_header);
2960 //     VisitSuccessorsOfLoopHeader(block);
2961 //   } else {
2962 //     VisitSuccessors(block, loop_header)
2963 //   }
2964 //   put block in result list;
2965 // }
2966 //
2967 // VisitLoopMembers(block, outer_loop_header) {
2968 //   foreach (block b in block loop members) {
2969 //     VisitSuccessorsOfLoopMember(b, outer_loop_header);
2970 //     if (b is loop header) VisitLoopMembers(b, outer_loop_header);
2971 //   }
2972 // }
2973 //
2974 // VisitSuccessorsOfLoopMember(block, outer_loop_header) {
2975 //   foreach (block b in block successors) Postorder(b, outer_loop_header)
2976 // }
2977 //
2978 // VisitSuccessorsOfLoopHeader(block) {
2979 //   foreach (block b in block successors) Postorder(b, block)
2980 // }
2981 //
2982 // VisitSuccessors(block, loop_header) {
2983 //   foreach (block b in block successors) Postorder(b, loop_header)
2984 // }
2985 //
2986 // The ordering is started by calling Postorder(entry, NULL).
2987 //
2988 // Each instance of PostorderProcessor represents the "stack frame" of the
2989 // recursion, and in particular keeps the state of the loop (iteration) of the
2990 // "Visit..."
function it represents.
2991 // To recycle memory we keep all the frames in a doubly linked list but
2992 // this means that we cannot use constructors to initialize the frames.
2993 //
2994 class PostorderProcessor : public ZoneObject {
2995 public:
2996 // Back link (towards the stack bottom).
2997 PostorderProcessor* parent() { return father_; }
2998 // Forward link (towards the stack top).
2999 PostorderProcessor* child() { return child_; }
3000 HBasicBlock* block() { return block_; }
3001 HLoopInformation* loop() { return loop_; }
3002 HBasicBlock* loop_header() { return loop_header_; }
3003
3004 static PostorderProcessor* CreateEntryProcessor(Zone* zone,
3005 HBasicBlock* block,
3006 BitVector* visited) {
3007 PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
3008 return result->SetupSuccessors(zone, block, NULL, visited);
3009 }
3010
3011 PostorderProcessor* PerformStep(Zone* zone,
3012 BitVector* visited,
3013 ZoneList<HBasicBlock*>* order) {
3014 PostorderProcessor* next =
3015 PerformNonBacktrackingStep(zone, visited, order);
3016 if (next != NULL) {
3017 return next;
3018 } else {
3019 return Backtrack(zone, visited, order);
3020 }
3021 }
3022
3023 private:
3024 explicit PostorderProcessor(PostorderProcessor* father)
3025 : father_(father), child_(NULL), successor_iterator(NULL) { }
3026
3027 // Each enum value states the cycle whose state is kept by this instance.
3028 enum LoopKind {
3029 NONE,
3030 SUCCESSORS,
3031 SUCCESSORS_OF_LOOP_HEADER,
3032 LOOP_MEMBERS,
3033 SUCCESSORS_OF_LOOP_MEMBER
3034 };
3035
3036 // Each "Setup..." method is like a constructor for a cycle state.
3037 PostorderProcessor* SetupSuccessors(Zone* zone,
3038 HBasicBlock* block,
3039 HBasicBlock* loop_header,
3040 BitVector* visited) {
3041 if (block == NULL || visited->Contains(block->block_id()) ||
3042 block->parent_loop_header() != loop_header) {
3043 kind_ = NONE;
3044 block_ = NULL;
3045 loop_ = NULL;
3046 loop_header_ = NULL;
3047 return this;
3048 } else {
3049 block_ = block;
3050 loop_ = NULL;
3051 visited->Add(block->block_id());
3052
3053 if (block->IsLoopHeader()) {
3054 kind_ = SUCCESSORS_OF_LOOP_HEADER;
3055 loop_header_ = block;
3056 InitializeSuccessors();
3057 PostorderProcessor* result = Push(zone);
3058 return result->SetupLoopMembers(zone, block, block->loop_information(),
3059 loop_header);
3060 } else {
3061 ASSERT(block->IsFinished());
3062 kind_ = SUCCESSORS;
3063 loop_header_ = loop_header;
3064 InitializeSuccessors();
3065 return this;
3066 }
3067 }
3068 }
3069
3070 PostorderProcessor* SetupLoopMembers(Zone* zone,
3071 HBasicBlock* block,
3072 HLoopInformation* loop,
3073 HBasicBlock* loop_header) {
3074 kind_ = LOOP_MEMBERS;
3075 block_ = block;
3076 loop_ = loop;
3077 loop_header_ = loop_header;
3078 InitializeLoopMembers();
3079 return this;
3080 }
3081
3082 PostorderProcessor* SetupSuccessorsOfLoopMember(
3083 HBasicBlock* block,
3084 HLoopInformation* loop,
3085 HBasicBlock* loop_header) {
3086 kind_ = SUCCESSORS_OF_LOOP_MEMBER;
3087 block_ = block;
3088 loop_ = loop;
3089 loop_header_ = loop_header;
3090 InitializeSuccessors();
3091 return this;
3092 }
3093
3094 // This method "allocates" a new stack frame.
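// ("Allocates" in quotes: a child frame, once created, is cached in child_
// and handed out again by every subsequent Push() at this stack depth.)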
3095 PostorderProcessor* Push(Zone* zone) { 3096 if (child_ == NULL) { 3097 child_ = new(zone) PostorderProcessor(this); 3098 } 3099 return child_; 3100 } 3101 3102 void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) { 3103 ASSERT(block_->end()->FirstSuccessor() == NULL || 3104 order->Contains(block_->end()->FirstSuccessor()) || 3105 block_->end()->FirstSuccessor()->IsLoopHeader()); 3106 ASSERT(block_->end()->SecondSuccessor() == NULL || 3107 order->Contains(block_->end()->SecondSuccessor()) || 3108 block_->end()->SecondSuccessor()->IsLoopHeader()); 3109 order->Add(block_, zone); 3110 } 3111 3112 // This method is the basic block to walk up the stack. 3113 PostorderProcessor* Pop(Zone* zone, 3114 BitVector* visited, 3115 ZoneList<HBasicBlock*>* order) { 3116 switch (kind_) { 3117 case SUCCESSORS: 3118 case SUCCESSORS_OF_LOOP_HEADER: 3119 ClosePostorder(order, zone); 3120 return father_; 3121 case LOOP_MEMBERS: 3122 return father_; 3123 case SUCCESSORS_OF_LOOP_MEMBER: 3124 if (block()->IsLoopHeader() && block() != loop_->loop_header()) { 3125 // In this case we need to perform a LOOP_MEMBERS cycle so we 3126 // initialize it and return this instead of father. 3127 return SetupLoopMembers(zone, block(), 3128 block()->loop_information(), loop_header_); 3129 } else { 3130 return father_; 3131 } 3132 case NONE: 3133 return father_; 3134 } 3135 UNREACHABLE(); 3136 return NULL; 3137 } 3138 3139 // Walks up the stack. 3140 PostorderProcessor* Backtrack(Zone* zone, 3141 BitVector* visited, 3142 ZoneList<HBasicBlock*>* order) { 3143 PostorderProcessor* parent = Pop(zone, visited, order); 3144 while (parent != NULL) { 3145 PostorderProcessor* next = 3146 parent->PerformNonBacktrackingStep(zone, visited, order); 3147 if (next != NULL) { 3148 return next; 3149 } else { 3150 parent = parent->Pop(zone, visited, order); 3151 } 3152 } 3153 return NULL; 3154 } 3155 3156 PostorderProcessor* PerformNonBacktrackingStep( 3157 Zone* zone, 3158 BitVector* visited, 3159 ZoneList<HBasicBlock*>* order) { 3160 HBasicBlock* next_block; 3161 switch (kind_) { 3162 case SUCCESSORS: 3163 next_block = AdvanceSuccessors(); 3164 if (next_block != NULL) { 3165 PostorderProcessor* result = Push(zone); 3166 return result->SetupSuccessors(zone, next_block, 3167 loop_header_, visited); 3168 } 3169 break; 3170 case SUCCESSORS_OF_LOOP_HEADER: 3171 next_block = AdvanceSuccessors(); 3172 if (next_block != NULL) { 3173 PostorderProcessor* result = Push(zone); 3174 return result->SetupSuccessors(zone, next_block, 3175 block(), visited); 3176 } 3177 break; 3178 case LOOP_MEMBERS: 3179 next_block = AdvanceLoopMembers(); 3180 if (next_block != NULL) { 3181 PostorderProcessor* result = Push(zone); 3182 return result->SetupSuccessorsOfLoopMember(next_block, 3183 loop_, loop_header_); 3184 } 3185 break; 3186 case SUCCESSORS_OF_LOOP_MEMBER: 3187 next_block = AdvanceSuccessors(); 3188 if (next_block != NULL) { 3189 PostorderProcessor* result = Push(zone); 3190 return result->SetupSuccessors(zone, next_block, 3191 loop_header_, visited); 3192 } 3193 break; 3194 case NONE: 3195 return NULL; 3196 } 3197 return NULL; 3198 } 3199 3200 // The following two methods implement a "foreach b in successors" cycle. 
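// The iteration state (successor_iterator, loop_index, loop_length) lives
// in the frame itself, which is what allows the recursion to be resumed
// one PerformStep() call at a time.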
3201 void InitializeSuccessors() { 3202 loop_index = 0; 3203 loop_length = 0; 3204 successor_iterator = HSuccessorIterator(block_->end()); 3205 } 3206 3207 HBasicBlock* AdvanceSuccessors() { 3208 if (!successor_iterator.Done()) { 3209 HBasicBlock* result = successor_iterator.Current(); 3210 successor_iterator.Advance(); 3211 return result; 3212 } 3213 return NULL; 3214 } 3215 3216 // The following two methods implement a "foreach b in loop members" cycle. 3217 void InitializeLoopMembers() { 3218 loop_index = 0; 3219 loop_length = loop_->blocks()->length(); 3220 } 3221 3222 HBasicBlock* AdvanceLoopMembers() { 3223 if (loop_index < loop_length) { 3224 HBasicBlock* result = loop_->blocks()->at(loop_index); 3225 loop_index++; 3226 return result; 3227 } else { 3228 return NULL; 3229 } 3230 } 3231 3232 LoopKind kind_; 3233 PostorderProcessor* father_; 3234 PostorderProcessor* child_; 3235 HLoopInformation* loop_; 3236 HBasicBlock* block_; 3237 HBasicBlock* loop_header_; 3238 int loop_index; 3239 int loop_length; 3240 HSuccessorIterator successor_iterator; 3241 }; 3242 3243 3244 void HGraph::OrderBlocks() { 3245 CompilationPhase phase("H_Block ordering", info()); 3246 BitVector visited(blocks_.length(), zone()); 3247 3248 ZoneList<HBasicBlock*> reverse_result(8, zone()); 3249 HBasicBlock* start = blocks_[0]; 3250 PostorderProcessor* postorder = 3251 PostorderProcessor::CreateEntryProcessor(zone(), start, &visited); 3252 while (postorder != NULL) { 3253 postorder = postorder->PerformStep(zone(), &visited, &reverse_result); 3254 } 3255 blocks_.Rewind(0); 3256 int index = 0; 3257 for (int i = reverse_result.length() - 1; i >= 0; --i) { 3258 HBasicBlock* b = reverse_result[i]; 3259 blocks_.Add(b, zone()); 3260 b->set_block_id(index++); 3261 } 3262 } 3263 3264 3265 void HGraph::AssignDominators() { 3266 HPhase phase("H_Assign dominators", this); 3267 for (int i = 0; i < blocks_.length(); ++i) { 3268 HBasicBlock* block = blocks_[i]; 3269 if (block->IsLoopHeader()) { 3270 // Only the first predecessor of a loop header is from outside the loop. 3271 // All others are back edges, and thus cannot dominate the loop header. 3272 block->AssignCommonDominator(block->predecessors()->first()); 3273 block->AssignLoopSuccessorDominators(); 3274 } else { 3275 for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) { 3276 blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j)); 3277 } 3278 } 3279 } 3280 } 3281 3282 3283 bool HGraph::CheckArgumentsPhiUses() { 3284 int block_count = blocks_.length(); 3285 for (int i = 0; i < block_count; ++i) { 3286 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) { 3287 HPhi* phi = blocks_[i]->phis()->at(j); 3288 // We don't support phi uses of arguments for now. 3289 if (phi->CheckFlag(HValue::kIsArguments)) return false; 3290 } 3291 } 3292 return true; 3293 } 3294 3295 3296 bool HGraph::CheckConstPhiUses() { 3297 int block_count = blocks_.length(); 3298 for (int i = 0; i < block_count; ++i) { 3299 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) { 3300 HPhi* phi = blocks_[i]->phis()->at(j); 3301 // Check for the hole value (from an uninitialized const). 
3302 for (int k = 0; k < phi->OperandCount(); k++) { 3303 if (phi->OperandAt(k) == GetConstantHole()) return false; 3304 } 3305 } 3306 } 3307 return true; 3308 } 3309 3310 3311 void HGraph::CollectPhis() { 3312 int block_count = blocks_.length(); 3313 phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone()); 3314 for (int i = 0; i < block_count; ++i) { 3315 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) { 3316 HPhi* phi = blocks_[i]->phis()->at(j); 3317 phi_list_->Add(phi, zone()); 3318 } 3319 } 3320 } 3321 3322 3323 // Implementation of utility class to encapsulate the translation state for 3324 // a (possibly inlined) function. 3325 FunctionState::FunctionState(HOptimizedGraphBuilder* owner, 3326 CompilationInfo* info, 3327 InliningKind inlining_kind) 3328 : owner_(owner), 3329 compilation_info_(info), 3330 call_context_(NULL), 3331 inlining_kind_(inlining_kind), 3332 function_return_(NULL), 3333 test_context_(NULL), 3334 entry_(NULL), 3335 arguments_object_(NULL), 3336 arguments_elements_(NULL), 3337 outer_(owner->function_state()) { 3338 if (outer_ != NULL) { 3339 // State for an inline function. 3340 if (owner->ast_context()->IsTest()) { 3341 HBasicBlock* if_true = owner->graph()->CreateBasicBlock(); 3342 HBasicBlock* if_false = owner->graph()->CreateBasicBlock(); 3343 if_true->MarkAsInlineReturnTarget(owner->current_block()); 3344 if_false->MarkAsInlineReturnTarget(owner->current_block()); 3345 TestContext* outer_test_context = TestContext::cast(owner->ast_context()); 3346 Expression* cond = outer_test_context->condition(); 3347 // The AstContext constructor pushed on the context stack. This newed 3348 // instance is the reason that AstContext can't be BASE_EMBEDDED. 3349 test_context_ = new TestContext(owner, cond, if_true, if_false); 3350 } else { 3351 function_return_ = owner->graph()->CreateBasicBlock(); 3352 function_return()->MarkAsInlineReturnTarget(owner->current_block()); 3353 } 3354 // Set this after possibly allocating a new TestContext above. 3355 call_context_ = owner->ast_context(); 3356 } 3357 3358 // Push on the state stack. 3359 owner->set_function_state(this); 3360 } 3361 3362 3363 FunctionState::~FunctionState() { 3364 delete test_context_; 3365 owner_->set_function_state(outer_); 3366 } 3367 3368 3369 // Implementation of utility classes to represent an expression's context in 3370 // the AST. 3371 AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind) 3372 : owner_(owner), 3373 kind_(kind), 3374 outer_(owner->ast_context()), 3375 for_typeof_(false) { 3376 owner->set_ast_context(this); // Push. 3377 #ifdef DEBUG 3378 ASSERT(owner->environment()->frame_type() == JS_FUNCTION); 3379 original_length_ = owner->environment()->length(); 3380 #endif 3381 } 3382 3383 3384 AstContext::~AstContext() { 3385 owner_->set_ast_context(outer_); // Pop. 3386 } 3387 3388 3389 EffectContext::~EffectContext() { 3390 ASSERT(owner()->HasStackOverflow() || 3391 owner()->current_block() == NULL || 3392 (owner()->environment()->length() == original_length_ && 3393 owner()->environment()->frame_type() == JS_FUNCTION)); 3394 } 3395 3396 3397 ValueContext::~ValueContext() { 3398 ASSERT(owner()->HasStackOverflow() || 3399 owner()->current_block() == NULL || 3400 (owner()->environment()->length() == original_length_ + 1 && 3401 owner()->environment()->frame_type() == JS_FUNCTION)); 3402 } 3403 3404 3405 void EffectContext::ReturnValue(HValue* value) { 3406 // The value is simply ignored. 
3407 } 3408 3409 3410 void ValueContext::ReturnValue(HValue* value) { 3411 // The value is tracked in the bailout environment, and communicated 3412 // through the environment as the result of the expression. 3413 if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) { 3414 owner()->Bailout(kBadValueContextForArgumentsValue); 3415 } 3416 owner()->Push(value); 3417 } 3418 3419 3420 void TestContext::ReturnValue(HValue* value) { 3421 BuildBranch(value); 3422 } 3423 3424 3425 void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) { 3426 ASSERT(!instr->IsControlInstruction()); 3427 owner()->AddInstruction(instr); 3428 if (instr->HasObservableSideEffects()) { 3429 owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE); 3430 } 3431 } 3432 3433 3434 void EffectContext::ReturnControl(HControlInstruction* instr, 3435 BailoutId ast_id) { 3436 ASSERT(!instr->HasObservableSideEffects()); 3437 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock(); 3438 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock(); 3439 instr->SetSuccessorAt(0, empty_true); 3440 instr->SetSuccessorAt(1, empty_false); 3441 owner()->FinishCurrentBlock(instr); 3442 HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id); 3443 owner()->set_current_block(join); 3444 } 3445 3446 3447 void EffectContext::ReturnContinuation(HIfContinuation* continuation, 3448 BailoutId ast_id) { 3449 HBasicBlock* true_branch = NULL; 3450 HBasicBlock* false_branch = NULL; 3451 continuation->Continue(&true_branch, &false_branch); 3452 if (!continuation->IsTrueReachable()) { 3453 owner()->set_current_block(false_branch); 3454 } else if (!continuation->IsFalseReachable()) { 3455 owner()->set_current_block(true_branch); 3456 } else { 3457 HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id); 3458 owner()->set_current_block(join); 3459 } 3460 } 3461 3462 3463 void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) { 3464 ASSERT(!instr->IsControlInstruction()); 3465 if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) { 3466 return owner()->Bailout(kBadValueContextForArgumentsObjectValue); 3467 } 3468 owner()->AddInstruction(instr); 3469 owner()->Push(instr); 3470 if (instr->HasObservableSideEffects()) { 3471 owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE); 3472 } 3473 } 3474 3475 3476 void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) { 3477 ASSERT(!instr->HasObservableSideEffects()); 3478 if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) { 3479 return owner()->Bailout(kBadValueContextForArgumentsObjectValue); 3480 } 3481 HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock(); 3482 HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock(); 3483 instr->SetSuccessorAt(0, materialize_true); 3484 instr->SetSuccessorAt(1, materialize_false); 3485 owner()->FinishCurrentBlock(instr); 3486 owner()->set_current_block(materialize_true); 3487 owner()->Push(owner()->graph()->GetConstantTrue()); 3488 owner()->set_current_block(materialize_false); 3489 owner()->Push(owner()->graph()->GetConstantFalse()); 3490 HBasicBlock* join = 3491 owner()->CreateJoin(materialize_true, materialize_false, ast_id); 3492 owner()->set_current_block(join); 3493 } 3494 3495 3496 void ValueContext::ReturnContinuation(HIfContinuation* continuation, 3497 BailoutId ast_id) { 3498 HBasicBlock* materialize_true = NULL; 3499 HBasicBlock* materialize_false = NULL; 3500 
continuation->Continue(&materialize_true, &materialize_false); 3501 if (continuation->IsTrueReachable()) { 3502 owner()->set_current_block(materialize_true); 3503 owner()->Push(owner()->graph()->GetConstantTrue()); 3504 owner()->set_current_block(materialize_true); 3505 } 3506 if (continuation->IsFalseReachable()) { 3507 owner()->set_current_block(materialize_false); 3508 owner()->Push(owner()->graph()->GetConstantFalse()); 3509 owner()->set_current_block(materialize_false); 3510 } 3511 if (continuation->TrueAndFalseReachable()) { 3512 HBasicBlock* join = 3513 owner()->CreateJoin(materialize_true, materialize_false, ast_id); 3514 owner()->set_current_block(join); 3515 } 3516 } 3517 3518 3519 void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) { 3520 ASSERT(!instr->IsControlInstruction()); 3521 HOptimizedGraphBuilder* builder = owner(); 3522 builder->AddInstruction(instr); 3523 // We expect a simulate after every expression with side effects, though 3524 // this one isn't actually needed (and wouldn't work if it were targeted). 3525 if (instr->HasObservableSideEffects()) { 3526 builder->Push(instr); 3527 builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE); 3528 builder->Pop(); 3529 } 3530 BuildBranch(instr); 3531 } 3532 3533 3534 void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) { 3535 ASSERT(!instr->HasObservableSideEffects()); 3536 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock(); 3537 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock(); 3538 instr->SetSuccessorAt(0, empty_true); 3539 instr->SetSuccessorAt(1, empty_false); 3540 owner()->FinishCurrentBlock(instr); 3541 owner()->Goto(empty_true, if_true(), owner()->function_state()); 3542 owner()->Goto(empty_false, if_false(), owner()->function_state()); 3543 owner()->set_current_block(NULL); 3544 } 3545 3546 3547 void TestContext::ReturnContinuation(HIfContinuation* continuation, 3548 BailoutId ast_id) { 3549 HBasicBlock* true_branch = NULL; 3550 HBasicBlock* false_branch = NULL; 3551 continuation->Continue(&true_branch, &false_branch); 3552 if (continuation->IsTrueReachable()) { 3553 owner()->Goto(true_branch, if_true(), owner()->function_state()); 3554 } 3555 if (continuation->IsFalseReachable()) { 3556 owner()->Goto(false_branch, if_false(), owner()->function_state()); 3557 } 3558 owner()->set_current_block(NULL); 3559 } 3560 3561 3562 void TestContext::BuildBranch(HValue* value) { 3563 // We expect the graph to be in edge-split form: there is no edge that 3564 // connects a branch node to a join node. We conservatively ensure that 3565 // property by always adding an empty block on the outgoing edges of this 3566 // branch. 3567 HOptimizedGraphBuilder* builder = owner(); 3568 if (value != NULL && value->CheckFlag(HValue::kIsArguments)) { 3569 builder->Bailout(kArgumentsObjectValueInATestContext); 3570 } 3571 ToBooleanStub::Types expected(condition()->to_boolean_types()); 3572 ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None()); 3573 } 3574 3575 3576 // HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts. 
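// Typical use of the macros below (a sketch of the pattern used throughout
// this file): wrap each recursive visit so a bailout recorded deeper in the
// AST walk unwinds the builder immediately, e.g.
//
//   CHECK_ALIVE(VisitForValue(expr->left()));
//   CHECK_ALIVE(VisitForValue(expr->right()));
//
// CHECK_BAILOUT only tests for stack overflow (which doubles as the bailout
// signal, see Bailout() below), while CHECK_ALIVE additionally stops when the
// current block has been closed by control flow such as return or continue.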
3577 #define CHECK_BAILOUT(call) \ 3578 do { \ 3579 call; \ 3580 if (HasStackOverflow()) return; \ 3581 } while (false) 3582 3583 3584 #define CHECK_ALIVE(call) \ 3585 do { \ 3586 call; \ 3587 if (HasStackOverflow() || current_block() == NULL) return; \ 3588 } while (false) 3589 3590 3591 #define CHECK_ALIVE_OR_RETURN(call, value) \ 3592 do { \ 3593 call; \ 3594 if (HasStackOverflow() || current_block() == NULL) return value; \ 3595 } while (false) 3596 3597 3598 void HOptimizedGraphBuilder::Bailout(BailoutReason reason) { 3599 current_info()->set_bailout_reason(reason); 3600 SetStackOverflow(); 3601 } 3602 3603 3604 void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) { 3605 EffectContext for_effect(this); 3606 Visit(expr); 3607 } 3608 3609 3610 void HOptimizedGraphBuilder::VisitForValue(Expression* expr, 3611 ArgumentsAllowedFlag flag) { 3612 ValueContext for_value(this, flag); 3613 Visit(expr); 3614 } 3615 3616 3617 void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) { 3618 ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED); 3619 for_value.set_for_typeof(true); 3620 Visit(expr); 3621 } 3622 3623 3624 3625 void HOptimizedGraphBuilder::VisitForControl(Expression* expr, 3626 HBasicBlock* true_block, 3627 HBasicBlock* false_block) { 3628 TestContext for_test(this, expr, true_block, false_block); 3629 Visit(expr); 3630 } 3631 3632 3633 void HOptimizedGraphBuilder::VisitArgument(Expression* expr) { 3634 CHECK_ALIVE(VisitForValue(expr)); 3635 Push(Add<HPushArgument>(Pop())); 3636 } 3637 3638 3639 void HOptimizedGraphBuilder::VisitArgumentList( 3640 ZoneList<Expression*>* arguments) { 3641 for (int i = 0; i < arguments->length(); i++) { 3642 CHECK_ALIVE(VisitArgument(arguments->at(i))); 3643 } 3644 } 3645 3646 3647 void HOptimizedGraphBuilder::VisitExpressions( 3648 ZoneList<Expression*>* exprs) { 3649 for (int i = 0; i < exprs->length(); ++i) { 3650 CHECK_ALIVE(VisitForValue(exprs->at(i))); 3651 } 3652 } 3653 3654 3655 bool HOptimizedGraphBuilder::BuildGraph() { 3656 if (current_info()->function()->is_generator()) { 3657 Bailout(kFunctionIsAGenerator); 3658 return false; 3659 } 3660 Scope* scope = current_info()->scope(); 3661 if (scope->HasIllegalRedeclaration()) { 3662 Bailout(kFunctionWithIllegalRedeclaration); 3663 return false; 3664 } 3665 if (scope->calls_eval()) { 3666 Bailout(kFunctionCallsEval); 3667 return false; 3668 } 3669 SetUpScope(scope); 3670 3671 // Add an edge to the body entry. This is warty: the graph's start 3672 // environment will be used by the Lithium translation as the initial 3673 // environment on graph entry, but it has now been mutated by the 3674 // Hydrogen translation of the instructions in the start block. This 3675 // environment uses values which have not been defined yet. These 3676 // Hydrogen instructions will then be replayed by the Lithium 3677 // translation, so they cannot have an environment effect. The edge to 3678 // the body's entry block (along with some special logic for the start 3679 // block in HInstruction::InsertAfter) seals the start block from 3680 // getting unwanted instructions inserted. 3681 // 3682 // TODO(kmillikin): Fix this. Stop mutating the initial environment. 3683 // Make the Hydrogen instructions in the initial block into Hydrogen 3684 // values (but not instructions), present in the initial environment and 3685 // not replayed by the Lithium translation. 
3686 HEnvironment* initial_env = environment()->CopyWithoutHistory(); 3687 HBasicBlock* body_entry = CreateBasicBlock(initial_env); 3688 Goto(body_entry); 3689 body_entry->SetJoinId(BailoutId::FunctionEntry()); 3690 set_current_block(body_entry); 3691 3692 // Handle implicit declaration of the function name in named function 3693 // expressions before other declarations. 3694 if (scope->is_function_scope() && scope->function() != NULL) { 3695 VisitVariableDeclaration(scope->function()); 3696 } 3697 VisitDeclarations(scope->declarations()); 3698 Add<HSimulate>(BailoutId::Declarations()); 3699 3700 Add<HStackCheck>(HStackCheck::kFunctionEntry); 3701 3702 VisitStatements(current_info()->function()->body()); 3703 if (HasStackOverflow()) return false; 3704 3705 if (current_block() != NULL) { 3706 Add<HReturn>(graph()->GetConstantUndefined()); 3707 set_current_block(NULL); 3708 } 3709 3710 // If the checksum of the number of type info changes is the same as the 3711 // last time this function was compiled, then this recompile is likely not 3712 // due to missing/inadequate type feedback, but rather too aggressive 3713 // optimization. Disable optimistic LICM in that case. 3714 Handle<Code> unoptimized_code(current_info()->shared_info()->code()); 3715 ASSERT(unoptimized_code->kind() == Code::FUNCTION); 3716 Handle<TypeFeedbackInfo> type_info( 3717 TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info())); 3718 int checksum = type_info->own_type_change_checksum(); 3719 int composite_checksum = graph()->update_type_change_checksum(checksum); 3720 graph()->set_use_optimistic_licm( 3721 !type_info->matches_inlined_type_change_checksum(composite_checksum)); 3722 type_info->set_inlined_type_change_checksum(composite_checksum); 3723 3724 // Perform any necessary OSR-specific cleanups or changes to the graph. 3725 osr()->FinishGraph(); 3726 3727 return true; 3728 } 3729 3730 3731 bool HGraph::Optimize(BailoutReason* bailout_reason) { 3732 OrderBlocks(); 3733 AssignDominators(); 3734 3735 // We need to create a HConstant "zero" now so that GVN will fold every 3736 // zero-valued constant in the graph together. 3737 // The constant is needed to make idef-based bounds check work: the pass 3738 // evaluates relations with "zero" and that zero cannot be created after GVN. 3739 GetConstant0(); 3740 3741 #ifdef DEBUG 3742 // Do a full verify after building the graph and computing dominators. 3743 Verify(true); 3744 #endif 3745 3746 if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) { 3747 Run<HEnvironmentLivenessAnalysisPhase>(); 3748 } 3749 3750 if (!CheckConstPhiUses()) { 3751 *bailout_reason = kUnsupportedPhiUseOfConstVariable; 3752 return false; 3753 } 3754 Run<HRedundantPhiEliminationPhase>(); 3755 if (!CheckArgumentsPhiUses()) { 3756 *bailout_reason = kUnsupportedPhiUseOfArguments; 3757 return false; 3758 } 3759 3760 // Find and mark unreachable code to simplify optimizations, especially gvn, 3761 // where unreachable code could unnecessarily defeat LICM. 3762 Run<HMarkUnreachableBlocksPhase>(); 3763 3764 if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>(); 3765 if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>(); 3766 3767 if (FLAG_load_elimination) Run<HLoadEliminationPhase>(); 3768 3769 CollectPhis(); 3770 3771 if (has_osr()) osr()->FinishOsrValues(); 3772 3773 Run<HInferRepresentationPhase>(); 3774 3775 // Remove HSimulate instructions that have turned out not to be needed 3776 // after all by folding them into the following HSimulate. 
3777 // This must happen after inferring representations. 3778 Run<HMergeRemovableSimulatesPhase>(); 3779 3780 Run<HMarkDeoptimizeOnUndefinedPhase>(); 3781 Run<HRepresentationChangesPhase>(); 3782 3783 Run<HInferTypesPhase>(); 3784 3785 // Must be performed before canonicalization to ensure that Canonicalize 3786 // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with 3787 // zero. 3788 if (FLAG_opt_safe_uint32_operations) Run<HUint32AnalysisPhase>(); 3789 3790 if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>(); 3791 3792 if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>(); 3793 3794 if (FLAG_check_elimination) Run<HCheckEliminationPhase>(); 3795 3796 if (FLAG_use_range) Run<HRangeAnalysisPhase>(); 3797 3798 Run<HComputeChangeUndefinedToNaN>(); 3799 Run<HComputeMinusZeroChecksPhase>(); 3800 3801 // Eliminate redundant stack checks on backwards branches. 3802 Run<HStackCheckEliminationPhase>(); 3803 3804 if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>(); 3805 if (FLAG_array_bounds_checks_hoisting) Run<HBoundsCheckHoistingPhase>(); 3806 if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>(); 3807 if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>(); 3808 3809 RestoreActualValues(); 3810 3811 // Find unreachable code a second time, GVN and other optimizations may have 3812 // made blocks unreachable that were previously reachable. 3813 Run<HMarkUnreachableBlocksPhase>(); 3814 3815 return true; 3816 } 3817 3818 3819 void HGraph::RestoreActualValues() { 3820 HPhase phase("H_Restore actual values", this); 3821 3822 for (int block_index = 0; block_index < blocks()->length(); block_index++) { 3823 HBasicBlock* block = blocks()->at(block_index); 3824 3825 #ifdef DEBUG 3826 for (int i = 0; i < block->phis()->length(); i++) { 3827 HPhi* phi = block->phis()->at(i); 3828 ASSERT(phi->ActualValue() == phi); 3829 } 3830 #endif 3831 3832 for (HInstructionIterator it(block); !it.Done(); it.Advance()) { 3833 HInstruction* instruction = it.Current(); 3834 if (instruction->ActualValue() != instruction) { 3835 ASSERT(instruction->IsInformativeDefinition()); 3836 if (instruction->IsPurelyInformativeDefinition()) { 3837 instruction->DeleteAndReplaceWith(instruction->RedefinedOperand()); 3838 } else { 3839 instruction->ReplaceAllUsesWith(instruction->ActualValue()); 3840 } 3841 } 3842 } 3843 } 3844 } 3845 3846 3847 template <class Instruction> 3848 HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) { 3849 int count = call->argument_count(); 3850 ZoneList<HValue*> arguments(count, zone()); 3851 for (int i = 0; i < count; ++i) { 3852 arguments.Add(Pop(), zone()); 3853 } 3854 3855 while (!arguments.is_empty()) { 3856 Add<HPushArgument>(arguments.RemoveLast()); 3857 } 3858 return call; 3859 } 3860 3861 3862 void HOptimizedGraphBuilder::SetUpScope(Scope* scope) { 3863 // First special is HContext. 3864 HInstruction* context = Add<HContext>(); 3865 environment()->BindContext(context); 3866 3867 // Create an arguments object containing the initial parameters. Set the 3868 // initial values of parameters including "this" having parameter index 0. 
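// Environment layout at this point (the receiver is counted as a parameter):
//
//   index 0  -> 'this'
//   index 1..scope->num_parameters()  -> the declared parameters
//
// so parameter_count() == scope->num_parameters() + 1, which the assert
// below checks.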
3869 ASSERT_EQ(scope->num_parameters() + 1, environment()->parameter_count()); 3870 HArgumentsObject* arguments_object = 3871 New<HArgumentsObject>(environment()->parameter_count()); 3872 for (int i = 0; i < environment()->parameter_count(); ++i) { 3873 HInstruction* parameter = Add<HParameter>(i); 3874 arguments_object->AddArgument(parameter, zone()); 3875 environment()->Bind(i, parameter); 3876 } 3877 AddInstruction(arguments_object); 3878 graph()->SetArgumentsObject(arguments_object); 3879 3880 HConstant* undefined_constant = graph()->GetConstantUndefined(); 3881 // Initialize specials and locals to undefined. 3882 for (int i = environment()->parameter_count() + 1; 3883 i < environment()->length(); 3884 ++i) { 3885 environment()->Bind(i, undefined_constant); 3886 } 3887 3888 // Handle the arguments and arguments shadow variables specially (they do 3889 // not have declarations). 3890 if (scope->arguments() != NULL) { 3891 if (!scope->arguments()->IsStackAllocated()) { 3892 return Bailout(kContextAllocatedArguments); 3893 } 3894 3895 environment()->Bind(scope->arguments(), 3896 graph()->GetArgumentsObject()); 3897 } 3898 } 3899 3900 3901 void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) { 3902 for (int i = 0; i < statements->length(); i++) { 3903 Statement* stmt = statements->at(i); 3904 CHECK_ALIVE(Visit(stmt)); 3905 if (stmt->IsJump()) break; 3906 } 3907 } 3908 3909 3910 void HOptimizedGraphBuilder::VisitBlock(Block* stmt) { 3911 ASSERT(!HasStackOverflow()); 3912 ASSERT(current_block() != NULL); 3913 ASSERT(current_block()->HasPredecessor()); 3914 if (stmt->scope() != NULL) { 3915 return Bailout(kScopedBlock); 3916 } 3917 BreakAndContinueInfo break_info(stmt); 3918 { BreakAndContinueScope push(&break_info, this); 3919 CHECK_BAILOUT(VisitStatements(stmt->statements())); 3920 } 3921 HBasicBlock* break_block = break_info.break_block(); 3922 if (break_block != NULL) { 3923 if (current_block() != NULL) Goto(break_block); 3924 break_block->SetJoinId(stmt->ExitId()); 3925 set_current_block(break_block); 3926 } 3927 } 3928 3929 3930 void HOptimizedGraphBuilder::VisitExpressionStatement( 3931 ExpressionStatement* stmt) { 3932 ASSERT(!HasStackOverflow()); 3933 ASSERT(current_block() != NULL); 3934 ASSERT(current_block()->HasPredecessor()); 3935 VisitForEffect(stmt->expression()); 3936 } 3937 3938 3939 void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) { 3940 ASSERT(!HasStackOverflow()); 3941 ASSERT(current_block() != NULL); 3942 ASSERT(current_block()->HasPredecessor()); 3943 } 3944 3945 3946 void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) { 3947 ASSERT(!HasStackOverflow()); 3948 ASSERT(current_block() != NULL); 3949 ASSERT(current_block()->HasPredecessor()); 3950 if (stmt->condition()->ToBooleanIsTrue()) { 3951 Add<HSimulate>(stmt->ThenId()); 3952 Visit(stmt->then_statement()); 3953 } else if (stmt->condition()->ToBooleanIsFalse()) { 3954 Add<HSimulate>(stmt->ElseId()); 3955 Visit(stmt->else_statement()); 3956 } else { 3957 HBasicBlock* cond_true = graph()->CreateBasicBlock(); 3958 HBasicBlock* cond_false = graph()->CreateBasicBlock(); 3959 CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false)); 3960 3961 if (cond_true->HasPredecessor()) { 3962 cond_true->SetJoinId(stmt->ThenId()); 3963 set_current_block(cond_true); 3964 CHECK_BAILOUT(Visit(stmt->then_statement())); 3965 cond_true = current_block(); 3966 } else { 3967 cond_true = NULL; 3968 } 3969 3970 if (cond_false->HasPredecessor()) { 3971 
cond_false->SetJoinId(stmt->ElseId()); 3972 set_current_block(cond_false); 3973 CHECK_BAILOUT(Visit(stmt->else_statement())); 3974 cond_false = current_block(); 3975 } else { 3976 cond_false = NULL; 3977 } 3978 3979 HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId()); 3980 set_current_block(join); 3981 } 3982 } 3983 3984 3985 HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get( 3986 BreakableStatement* stmt, 3987 BreakType type, 3988 int* drop_extra) { 3989 *drop_extra = 0; 3990 BreakAndContinueScope* current = this; 3991 while (current != NULL && current->info()->target() != stmt) { 3992 *drop_extra += current->info()->drop_extra(); 3993 current = current->next(); 3994 } 3995 ASSERT(current != NULL); // Always found (unless stack is malformed). 3996 3997 if (type == BREAK) { 3998 *drop_extra += current->info()->drop_extra(); 3999 } 4000 4001 HBasicBlock* block = NULL; 4002 switch (type) { 4003 case BREAK: 4004 block = current->info()->break_block(); 4005 if (block == NULL) { 4006 block = current->owner()->graph()->CreateBasicBlock(); 4007 current->info()->set_break_block(block); 4008 } 4009 break; 4010 4011 case CONTINUE: 4012 block = current->info()->continue_block(); 4013 if (block == NULL) { 4014 block = current->owner()->graph()->CreateBasicBlock(); 4015 current->info()->set_continue_block(block); 4016 } 4017 break; 4018 } 4019 4020 return block; 4021 } 4022 4023 4024 void HOptimizedGraphBuilder::VisitContinueStatement( 4025 ContinueStatement* stmt) { 4026 ASSERT(!HasStackOverflow()); 4027 ASSERT(current_block() != NULL); 4028 ASSERT(current_block()->HasPredecessor()); 4029 int drop_extra = 0; 4030 HBasicBlock* continue_block = break_scope()->Get( 4031 stmt->target(), BreakAndContinueScope::CONTINUE, &drop_extra); 4032 Drop(drop_extra); 4033 Goto(continue_block); 4034 set_current_block(NULL); 4035 } 4036 4037 4038 void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) { 4039 ASSERT(!HasStackOverflow()); 4040 ASSERT(current_block() != NULL); 4041 ASSERT(current_block()->HasPredecessor()); 4042 int drop_extra = 0; 4043 HBasicBlock* break_block = break_scope()->Get( 4044 stmt->target(), BreakAndContinueScope::BREAK, &drop_extra); 4045 Drop(drop_extra); 4046 Goto(break_block); 4047 set_current_block(NULL); 4048 } 4049 4050 4051 void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) { 4052 ASSERT(!HasStackOverflow()); 4053 ASSERT(current_block() != NULL); 4054 ASSERT(current_block()->HasPredecessor()); 4055 FunctionState* state = function_state(); 4056 AstContext* context = call_context(); 4057 if (context == NULL) { 4058 // Not an inlined return, so an actual one. 4059 CHECK_ALIVE(VisitForValue(stmt->expression())); 4060 HValue* result = environment()->Pop(); 4061 Add<HReturn>(result); 4062 } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) { 4063 // Return from an inlined construct call. In a test context the return value 4064 // will always evaluate to true, in a value context the return value needs 4065 // to be a JSObject. 
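// This mirrors the JavaScript 'new' semantics: roughly, for
// 'function C() { return v; } new C()', the result is v only when v is an
// object; otherwise it is the freshly allocated receiver. In a value context
// the instance-type check below therefore picks between the returned value
// and the receiver.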
4066 if (context->IsTest()) {
4067 TestContext* test = TestContext::cast(context);
4068 CHECK_ALIVE(VisitForEffect(stmt->expression()));
4069 Goto(test->if_true(), state);
4070 } else if (context->IsEffect()) {
4071 CHECK_ALIVE(VisitForEffect(stmt->expression()));
4072 Goto(function_return(), state);
4073 } else {
4074 ASSERT(context->IsValue());
4075 CHECK_ALIVE(VisitForValue(stmt->expression()));
4076 HValue* return_value = Pop();
4077 HValue* receiver = environment()->arguments_environment()->Lookup(0);
4078 HHasInstanceTypeAndBranch* typecheck =
4079 New<HHasInstanceTypeAndBranch>(return_value,
4080 FIRST_SPEC_OBJECT_TYPE,
4081 LAST_SPEC_OBJECT_TYPE);
4082 HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
4083 HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
4084 typecheck->SetSuccessorAt(0, if_spec_object);
4085 typecheck->SetSuccessorAt(1, not_spec_object);
4086 FinishCurrentBlock(typecheck);
4087 AddLeaveInlined(if_spec_object, return_value, state);
4088 AddLeaveInlined(not_spec_object, receiver, state);
4089 }
4090 } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
4091 // Return from an inlined setter call. The returned value is never used; the
4092 // value of an assignment is always the value of the RHS of the assignment.
4093 CHECK_ALIVE(VisitForEffect(stmt->expression()));
4094 if (context->IsTest()) {
4095 HValue* rhs = environment()->arguments_environment()->Lookup(1);
4096 context->ReturnValue(rhs);
4097 } else if (context->IsEffect()) {
4098 Goto(function_return(), state);
4099 } else {
4100 ASSERT(context->IsValue());
4101 HValue* rhs = environment()->arguments_environment()->Lookup(1);
4102 AddLeaveInlined(rhs, state);
4103 }
4104 } else {
4105 // Return from a normal inlined function. Visit the subexpression in the
4106 // expression context of the call.
4107 if (context->IsTest()) {
4108 TestContext* test = TestContext::cast(context);
4109 VisitForControl(stmt->expression(), test->if_true(), test->if_false());
4110 } else if (context->IsEffect()) {
4111 CHECK_ALIVE(VisitForEffect(stmt->expression()));
4112 Goto(function_return(), state);
4113 } else {
4114 ASSERT(context->IsValue());
4115 CHECK_ALIVE(VisitForValue(stmt->expression()));
4116 AddLeaveInlined(Pop(), state);
4117 }
4118 }
4119 set_current_block(NULL);
4120 }
4121
4122
4123 void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
4124 ASSERT(!HasStackOverflow());
4125 ASSERT(current_block() != NULL);
4126 ASSERT(current_block()->HasPredecessor());
4127 return Bailout(kWithStatement);
4128 }
4129
4130
4131 void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
4132 ASSERT(!HasStackOverflow());
4133 ASSERT(current_block() != NULL);
4134 ASSERT(current_block()->HasPredecessor());
4135
4136 // We only optimize switch statements with smi-literal smi comparisons,
4137 // with a bounded number of clauses.
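// A switch that stays optimizable therefore looks like (sketch):
//
//   switch (x) { case 1: ...; case 2: ...; default: ... }
//
// where every non-default label is a smi literal (or, alternatively, every
// label is a string literal), and the clause count stays within the limit
// defined below.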
4138 const int kCaseClauseLimit = 128; 4139 ZoneList<CaseClause*>* clauses = stmt->cases(); 4140 int clause_count = clauses->length(); 4141 if (clause_count > kCaseClauseLimit) { 4142 return Bailout(kSwitchStatementTooManyClauses); 4143 } 4144 4145 ASSERT(stmt->switch_type() != SwitchStatement::UNKNOWN_SWITCH); 4146 if (stmt->switch_type() == SwitchStatement::GENERIC_SWITCH) { 4147 return Bailout(kSwitchStatementMixedOrNonLiteralSwitchLabels); 4148 } 4149 4150 CHECK_ALIVE(VisitForValue(stmt->tag())); 4151 Add<HSimulate>(stmt->EntryId()); 4152 HValue* tag_value = Pop(); 4153 HBasicBlock* first_test_block = current_block(); 4154 4155 HUnaryControlInstruction* string_check = NULL; 4156 HBasicBlock* not_string_block = NULL; 4157 4158 // Test switch's tag value if all clauses are string literals 4159 if (stmt->switch_type() == SwitchStatement::STRING_SWITCH) { 4160 first_test_block = graph()->CreateBasicBlock(); 4161 not_string_block = graph()->CreateBasicBlock(); 4162 string_check = New<HIsStringAndBranch>( 4163 tag_value, first_test_block, not_string_block); 4164 FinishCurrentBlock(string_check); 4165 4166 set_current_block(first_test_block); 4167 } 4168 4169 // 1. Build all the tests, with dangling true branches 4170 BailoutId default_id = BailoutId::None(); 4171 for (int i = 0; i < clause_count; ++i) { 4172 CaseClause* clause = clauses->at(i); 4173 if (clause->is_default()) { 4174 default_id = clause->EntryId(); 4175 continue; 4176 } 4177 4178 // Generate a compare and branch. 4179 CHECK_ALIVE(VisitForValue(clause->label())); 4180 HValue* label_value = Pop(); 4181 4182 HBasicBlock* next_test_block = graph()->CreateBasicBlock(); 4183 HBasicBlock* body_block = graph()->CreateBasicBlock(); 4184 4185 HControlInstruction* compare; 4186 4187 if (stmt->switch_type() == SwitchStatement::SMI_SWITCH) { 4188 if (!clause->compare_type()->Is(Type::Smi())) { 4189 Add<HDeoptimize>("Non-smi switch type", Deoptimizer::SOFT); 4190 } 4191 4192 HCompareNumericAndBranch* compare_ = 4193 New<HCompareNumericAndBranch>(tag_value, 4194 label_value, 4195 Token::EQ_STRICT); 4196 compare_->set_observed_input_representation( 4197 Representation::Smi(), Representation::Smi()); 4198 compare = compare_; 4199 } else { 4200 compare = New<HStringCompareAndBranch>(tag_value, 4201 label_value, 4202 Token::EQ_STRICT); 4203 } 4204 4205 compare->SetSuccessorAt(0, body_block); 4206 compare->SetSuccessorAt(1, next_test_block); 4207 FinishCurrentBlock(compare); 4208 4209 set_current_block(next_test_block); 4210 } 4211 4212 // Save the current block to use for the default or to join with the 4213 // exit. 4214 HBasicBlock* last_block = current_block(); 4215 4216 if (not_string_block != NULL) { 4217 BailoutId join_id = !default_id.IsNone() ? default_id : stmt->ExitId(); 4218 last_block = CreateJoin(last_block, not_string_block, join_id); 4219 } 4220 4221 // 2. Loop over the clauses and the linked list of tests in lockstep, 4222 // translating the clause bodies. 4223 HBasicBlock* curr_test_block = first_test_block; 4224 HBasicBlock* fall_through_block = NULL; 4225 4226 BreakAndContinueInfo break_info(stmt); 4227 { BreakAndContinueScope push(&break_info, this); 4228 for (int i = 0; i < clause_count; ++i) { 4229 CaseClause* clause = clauses->at(i); 4230 4231 // Identify the block where normal (non-fall-through) control flow 4232 // goes to. 4233 HBasicBlock* normal_block = NULL; 4234 if (clause->is_default()) { 4235 if (last_block != NULL) { 4236 normal_block = last_block; 4237 last_block = NULL; // Cleared to indicate we've handled it. 
4238 } 4239 } else { 4240 // If the current test block is deoptimizing due to an unhandled clause 4241 // of the switch, the test instruction is in the next block since the 4242 // deopt must end the current block. 4243 if (curr_test_block->IsDeoptimizing()) { 4244 ASSERT(curr_test_block->end()->SecondSuccessor() == NULL); 4245 curr_test_block = curr_test_block->end()->FirstSuccessor(); 4246 } 4247 normal_block = curr_test_block->end()->FirstSuccessor(); 4248 curr_test_block = curr_test_block->end()->SecondSuccessor(); 4249 } 4250 4251 // Identify a block to emit the body into. 4252 if (normal_block == NULL) { 4253 if (fall_through_block == NULL) { 4254 // (a) Unreachable. 4255 if (clause->is_default()) { 4256 continue; // Might still be reachable clause bodies. 4257 } else { 4258 break; 4259 } 4260 } else { 4261 // (b) Reachable only as fall through. 4262 set_current_block(fall_through_block); 4263 } 4264 } else if (fall_through_block == NULL) { 4265 // (c) Reachable only normally. 4266 set_current_block(normal_block); 4267 } else { 4268 // (d) Reachable both ways. 4269 HBasicBlock* join = CreateJoin(fall_through_block, 4270 normal_block, 4271 clause->EntryId()); 4272 set_current_block(join); 4273 } 4274 4275 CHECK_BAILOUT(VisitStatements(clause->statements())); 4276 fall_through_block = current_block(); 4277 } 4278 } 4279 4280 // Create an up-to-3-way join. Use the break block if it exists since 4281 // it's already a join block. 4282 HBasicBlock* break_block = break_info.break_block(); 4283 if (break_block == NULL) { 4284 set_current_block(CreateJoin(fall_through_block, 4285 last_block, 4286 stmt->ExitId())); 4287 } else { 4288 if (fall_through_block != NULL) Goto(fall_through_block, break_block); 4289 if (last_block != NULL) Goto(last_block, break_block); 4290 break_block->SetJoinId(stmt->ExitId()); 4291 set_current_block(break_block); 4292 } 4293 } 4294 4295 4296 void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt, 4297 HBasicBlock* loop_entry, 4298 BreakAndContinueInfo* break_info) { 4299 BreakAndContinueScope push(break_info, this); 4300 Add<HSimulate>(stmt->StackCheckId()); 4301 HStackCheck* stack_check = 4302 HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch)); 4303 ASSERT(loop_entry->IsLoopHeader()); 4304 loop_entry->loop_information()->set_stack_check(stack_check); 4305 CHECK_BAILOUT(Visit(stmt->body())); 4306 } 4307 4308 4309 void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) { 4310 ASSERT(!HasStackOverflow()); 4311 ASSERT(current_block() != NULL); 4312 ASSERT(current_block()->HasPredecessor()); 4313 ASSERT(current_block() != NULL); 4314 HBasicBlock* loop_entry = BuildLoopEntry(stmt); 4315 4316 BreakAndContinueInfo break_info(stmt); 4317 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info)); 4318 HBasicBlock* body_exit = 4319 JoinContinue(stmt, current_block(), break_info.continue_block()); 4320 HBasicBlock* loop_successor = NULL; 4321 if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) { 4322 set_current_block(body_exit); 4323 // The block for a true condition, the actual predecessor block of the 4324 // back edge. 
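// Control-flow shape being built for 'do { body } while (cond);' (sketch):
//
//   loop_entry -> body -> cond --true---> back edge to loop_entry
//                              --false--> loop_successor (exit)
//
// The new 'body_exit' allocated below becomes the true target of the
// condition branch, i.e. the predecessor of the back edge.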
4325 body_exit = graph()->CreateBasicBlock(); 4326 loop_successor = graph()->CreateBasicBlock(); 4327 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor)); 4328 if (body_exit->HasPredecessor()) { 4329 body_exit->SetJoinId(stmt->BackEdgeId()); 4330 } else { 4331 body_exit = NULL; 4332 } 4333 if (loop_successor->HasPredecessor()) { 4334 loop_successor->SetJoinId(stmt->ExitId()); 4335 } else { 4336 loop_successor = NULL; 4337 } 4338 } 4339 HBasicBlock* loop_exit = CreateLoop(stmt, 4340 loop_entry, 4341 body_exit, 4342 loop_successor, 4343 break_info.break_block()); 4344 set_current_block(loop_exit); 4345 } 4346 4347 4348 void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) { 4349 ASSERT(!HasStackOverflow()); 4350 ASSERT(current_block() != NULL); 4351 ASSERT(current_block()->HasPredecessor()); 4352 ASSERT(current_block() != NULL); 4353 HBasicBlock* loop_entry = BuildLoopEntry(stmt); 4354 4355 // If the condition is constant true, do not generate a branch. 4356 HBasicBlock* loop_successor = NULL; 4357 if (!stmt->cond()->ToBooleanIsTrue()) { 4358 HBasicBlock* body_entry = graph()->CreateBasicBlock(); 4359 loop_successor = graph()->CreateBasicBlock(); 4360 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor)); 4361 if (body_entry->HasPredecessor()) { 4362 body_entry->SetJoinId(stmt->BodyId()); 4363 set_current_block(body_entry); 4364 } 4365 if (loop_successor->HasPredecessor()) { 4366 loop_successor->SetJoinId(stmt->ExitId()); 4367 } else { 4368 loop_successor = NULL; 4369 } 4370 } 4371 4372 BreakAndContinueInfo break_info(stmt); 4373 if (current_block() != NULL) { 4374 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info)); 4375 } 4376 HBasicBlock* body_exit = 4377 JoinContinue(stmt, current_block(), break_info.continue_block()); 4378 HBasicBlock* loop_exit = CreateLoop(stmt, 4379 loop_entry, 4380 body_exit, 4381 loop_successor, 4382 break_info.break_block()); 4383 set_current_block(loop_exit); 4384 } 4385 4386 4387 void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) { 4388 ASSERT(!HasStackOverflow()); 4389 ASSERT(current_block() != NULL); 4390 ASSERT(current_block()->HasPredecessor()); 4391 if (stmt->init() != NULL) { 4392 CHECK_ALIVE(Visit(stmt->init())); 4393 } 4394 ASSERT(current_block() != NULL); 4395 HBasicBlock* loop_entry = BuildLoopEntry(stmt); 4396 4397 HBasicBlock* loop_successor = NULL; 4398 if (stmt->cond() != NULL) { 4399 HBasicBlock* body_entry = graph()->CreateBasicBlock(); 4400 loop_successor = graph()->CreateBasicBlock(); 4401 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor)); 4402 if (body_entry->HasPredecessor()) { 4403 body_entry->SetJoinId(stmt->BodyId()); 4404 set_current_block(body_entry); 4405 } 4406 if (loop_successor->HasPredecessor()) { 4407 loop_successor->SetJoinId(stmt->ExitId()); 4408 } else { 4409 loop_successor = NULL; 4410 } 4411 } 4412 4413 BreakAndContinueInfo break_info(stmt); 4414 if (current_block() != NULL) { 4415 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info)); 4416 } 4417 HBasicBlock* body_exit = 4418 JoinContinue(stmt, current_block(), break_info.continue_block()); 4419 4420 if (stmt->next() != NULL && body_exit != NULL) { 4421 set_current_block(body_exit); 4422 CHECK_BAILOUT(Visit(stmt->next())); 4423 body_exit = current_block(); 4424 } 4425 4426 HBasicBlock* loop_exit = CreateLoop(stmt, 4427 loop_entry, 4428 body_exit, 4429 loop_successor, 4430 break_info.break_block()); 4431 set_current_block(loop_exit); 4432 } 4433 4434 4435 void 
HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
4436 ASSERT(!HasStackOverflow());
4437 ASSERT(current_block() != NULL);
4438 ASSERT(current_block()->HasPredecessor());
4439
4440 if (!FLAG_optimize_for_in) {
4441 return Bailout(kForInStatementOptimizationIsDisabled);
4442 }
4443
4444 if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) {
4445 return Bailout(kForInStatementIsNotFastCase);
4446 }
4447
4448 if (!stmt->each()->IsVariableProxy() ||
4449 !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
4450 return Bailout(kForInStatementWithNonLocalEachVariable);
4451 }
4452
4453 Variable* each_var = stmt->each()->AsVariableProxy()->var();
4454
4455 CHECK_ALIVE(VisitForValue(stmt->enumerable()));
4456 HValue* enumerable = Top(); // Leave enumerable at the top.
4457
4458 HInstruction* map = Add<HForInPrepareMap>(enumerable);
4459 Add<HSimulate>(stmt->PrepareId());
4460
4461 HInstruction* array = Add<HForInCacheArray>(
4462 enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);
4463
4464 HInstruction* enum_length = Add<HMapEnumLength>(map);
4465
4466 HInstruction* start_index = Add<HConstant>(0);
4467
4468 Push(map);
4469 Push(array);
4470 Push(enum_length);
4471 Push(start_index);
4472
4473 HInstruction* index_cache = Add<HForInCacheArray>(
4474 enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
4475 HForInCacheArray::cast(array)->set_index_cache(
4476 HForInCacheArray::cast(index_cache));
4477
4478 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4479
4480 HValue* index = environment()->ExpressionStackAt(0);
4481 HValue* limit = environment()->ExpressionStackAt(1);
4482
4483 // Check that we still have more keys.
4484 HCompareNumericAndBranch* compare_index =
4485 New<HCompareNumericAndBranch>(index, limit, Token::LT);
4486 compare_index->set_observed_input_representation(
4487 Representation::Smi(), Representation::Smi());
4488
4489 HBasicBlock* loop_body = graph()->CreateBasicBlock();
4490 HBasicBlock* loop_successor = graph()->CreateBasicBlock();
4491
4492 compare_index->SetSuccessorAt(0, loop_body);
4493 compare_index->SetSuccessorAt(1, loop_successor);
4494 FinishCurrentBlock(compare_index);
4495
4496 set_current_block(loop_successor);
4497 Drop(5);
4498
4499 set_current_block(loop_body);
4500
4501 HValue* key = Add<HLoadKeyed>(
4502 environment()->ExpressionStackAt(2), // Enum cache.
4503 environment()->ExpressionStackAt(0), // Iteration index.
4504 environment()->ExpressionStackAt(0),
4505 FAST_ELEMENTS);
4506
4507 // Check if the expected map still matches that of the enumerable.
4508 // If not, just deoptimize.
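// Expression stack layout inside the loop, grown by the pushes above (the
// enumerable was left on top before them):
//
//   ExpressionStackAt(4) -> enumerable object
//   ExpressionStackAt(3) -> its cached map
//   ExpressionStackAt(2) -> enum cache array of keys
//   ExpressionStackAt(1) -> enum length (loop limit)
//   ExpressionStackAt(0) -> current iteration index
//
// This is why the check below compares slots 4 and 3, and why Drop(5)
// clears the loop state on exit.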
4509 Add<HCheckMapValue>(environment()->ExpressionStackAt(4), 4510 environment()->ExpressionStackAt(3)); 4511 4512 Bind(each_var, key); 4513 4514 BreakAndContinueInfo break_info(stmt, 5); 4515 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info)); 4516 4517 HBasicBlock* body_exit = 4518 JoinContinue(stmt, current_block(), break_info.continue_block()); 4519 4520 if (body_exit != NULL) { 4521 set_current_block(body_exit); 4522 4523 HValue* current_index = Pop(); 4524 Push(AddUncasted<HAdd>(current_index, graph()->GetConstant1())); 4525 body_exit = current_block(); 4526 } 4527 4528 HBasicBlock* loop_exit = CreateLoop(stmt, 4529 loop_entry, 4530 body_exit, 4531 loop_successor, 4532 break_info.break_block()); 4533 4534 set_current_block(loop_exit); 4535 } 4536 4537 4538 void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) { 4539 ASSERT(!HasStackOverflow()); 4540 ASSERT(current_block() != NULL); 4541 ASSERT(current_block()->HasPredecessor()); 4542 return Bailout(kForOfStatement); 4543 } 4544 4545 4546 void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) { 4547 ASSERT(!HasStackOverflow()); 4548 ASSERT(current_block() != NULL); 4549 ASSERT(current_block()->HasPredecessor()); 4550 return Bailout(kTryCatchStatement); 4551 } 4552 4553 4554 void HOptimizedGraphBuilder::VisitTryFinallyStatement( 4555 TryFinallyStatement* stmt) { 4556 ASSERT(!HasStackOverflow()); 4557 ASSERT(current_block() != NULL); 4558 ASSERT(current_block()->HasPredecessor()); 4559 return Bailout(kTryFinallyStatement); 4560 } 4561 4562 4563 void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) { 4564 ASSERT(!HasStackOverflow()); 4565 ASSERT(current_block() != NULL); 4566 ASSERT(current_block()->HasPredecessor()); 4567 return Bailout(kDebuggerStatement); 4568 } 4569 4570 4571 void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) { 4572 UNREACHABLE(); 4573 } 4574 4575 4576 static Handle<SharedFunctionInfo> SearchSharedFunctionInfo( 4577 Code* unoptimized_code, FunctionLiteral* expr) { 4578 int start_position = expr->start_position(); 4579 for (RelocIterator it(unoptimized_code); !it.done(); it.next()) { 4580 RelocInfo* rinfo = it.rinfo(); 4581 if (rinfo->rmode() != RelocInfo::EMBEDDED_OBJECT) continue; 4582 Object* obj = rinfo->target_object(); 4583 if (obj->IsSharedFunctionInfo()) { 4584 SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj); 4585 if (shared->start_position() == start_position) { 4586 return Handle<SharedFunctionInfo>(shared); 4587 } 4588 } 4589 } 4590 4591 return Handle<SharedFunctionInfo>(); 4592 } 4593 4594 4595 void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) { 4596 ASSERT(!HasStackOverflow()); 4597 ASSERT(current_block() != NULL); 4598 ASSERT(current_block()->HasPredecessor()); 4599 Handle<SharedFunctionInfo> shared_info = 4600 SearchSharedFunctionInfo(current_info()->shared_info()->code(), expr); 4601 if (shared_info.is_null()) { 4602 shared_info = Compiler::BuildFunctionInfo(expr, current_info()->script()); 4603 } 4604 // We also have a stack overflow if the recursive compilation did. 
4605 if (HasStackOverflow()) return; 4606 HFunctionLiteral* instr = 4607 New<HFunctionLiteral>(shared_info, expr->pretenure()); 4608 return ast_context()->ReturnInstruction(instr, expr->id()); 4609 } 4610 4611 4612 void HOptimizedGraphBuilder::VisitNativeFunctionLiteral( 4613 NativeFunctionLiteral* expr) { 4614 ASSERT(!HasStackOverflow()); 4615 ASSERT(current_block() != NULL); 4616 ASSERT(current_block()->HasPredecessor()); 4617 return Bailout(kNativeFunctionLiteral); 4618 } 4619 4620 4621 void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) { 4622 ASSERT(!HasStackOverflow()); 4623 ASSERT(current_block() != NULL); 4624 ASSERT(current_block()->HasPredecessor()); 4625 HBasicBlock* cond_true = graph()->CreateBasicBlock(); 4626 HBasicBlock* cond_false = graph()->CreateBasicBlock(); 4627 CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false)); 4628 4629 // Visit the true and false subexpressions in the same AST context as the 4630 // whole expression. 4631 if (cond_true->HasPredecessor()) { 4632 cond_true->SetJoinId(expr->ThenId()); 4633 set_current_block(cond_true); 4634 CHECK_BAILOUT(Visit(expr->then_expression())); 4635 cond_true = current_block(); 4636 } else { 4637 cond_true = NULL; 4638 } 4639 4640 if (cond_false->HasPredecessor()) { 4641 cond_false->SetJoinId(expr->ElseId()); 4642 set_current_block(cond_false); 4643 CHECK_BAILOUT(Visit(expr->else_expression())); 4644 cond_false = current_block(); 4645 } else { 4646 cond_false = NULL; 4647 } 4648 4649 if (!ast_context()->IsTest()) { 4650 HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id()); 4651 set_current_block(join); 4652 if (join != NULL && !ast_context()->IsEffect()) { 4653 return ast_context()->ReturnValue(Pop()); 4654 } 4655 } 4656 } 4657 4658 4659 HOptimizedGraphBuilder::GlobalPropertyAccess 4660 HOptimizedGraphBuilder::LookupGlobalProperty( 4661 Variable* var, LookupResult* lookup, bool is_store) { 4662 if (var->is_this() || !current_info()->has_global_object()) { 4663 return kUseGeneric; 4664 } 4665 Handle<GlobalObject> global(current_info()->global_object()); 4666 global->Lookup(*var->name(), lookup); 4667 if (!lookup->IsNormal() || 4668 (is_store && lookup->IsReadOnly()) || 4669 lookup->holder() != *global) { 4670 return kUseGeneric; 4671 } 4672 4673 return kUseCell; 4674 } 4675 4676 4677 HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) { 4678 ASSERT(var->IsContextSlot()); 4679 HValue* context = environment()->context(); 4680 int length = current_info()->scope()->ContextChainLength(var->scope()); 4681 while (length-- > 0) { 4682 context = Add<HOuterContext>(context); 4683 } 4684 return context; 4685 } 4686 4687 4688 void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) { 4689 ASSERT(!HasStackOverflow()); 4690 ASSERT(current_block() != NULL); 4691 ASSERT(current_block()->HasPredecessor()); 4692 Variable* variable = expr->var(); 4693 switch (variable->location()) { 4694 case Variable::UNALLOCATED: { 4695 if (IsLexicalVariableMode(variable->mode())) { 4696 // TODO(rossberg): should this be an ASSERT? 4697 return Bailout(kReferenceToGlobalLexicalVariable); 4698 } 4699 // Handle known global constants like 'undefined' specially to avoid a 4700 // load from a global cell for them. 
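// These are the names whose values are fixed by the language, e.g.
//
//   undefined; NaN; Infinity;
//
// each of which can be embedded directly as an HConstant instead of being
// reloaded from the global object on every use.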
4701 Handle<Object> constant_value = 4702 isolate()->factory()->GlobalConstantFor(variable->name()); 4703 if (!constant_value.is_null()) { 4704 HConstant* instr = New<HConstant>(constant_value); 4705 return ast_context()->ReturnInstruction(instr, expr->id()); 4706 } 4707 4708 LookupResult lookup(isolate()); 4709 GlobalPropertyAccess type = 4710 LookupGlobalProperty(variable, &lookup, false); 4711 4712 if (type == kUseCell && 4713 current_info()->global_object()->IsAccessCheckNeeded()) { 4714 type = kUseGeneric; 4715 } 4716 4717 if (type == kUseCell) { 4718 Handle<GlobalObject> global(current_info()->global_object()); 4719 Handle<PropertyCell> cell(global->GetPropertyCell(&lookup)); 4720 if (cell->type()->IsConstant()) { 4721 cell->AddDependentCompilationInfo(top_info()); 4722 Handle<Object> constant_object = cell->type()->AsConstant(); 4723 if (constant_object->IsConsString()) { 4724 constant_object = 4725 FlattenGetString(Handle<String>::cast(constant_object)); 4726 } 4727 HConstant* constant = New<HConstant>(constant_object); 4728 return ast_context()->ReturnInstruction(constant, expr->id()); 4729 } else { 4730 HLoadGlobalCell* instr = 4731 New<HLoadGlobalCell>(cell, lookup.GetPropertyDetails()); 4732 return ast_context()->ReturnInstruction(instr, expr->id()); 4733 } 4734 } else { 4735 HGlobalObject* global_object = Add<HGlobalObject>(); 4736 HLoadGlobalGeneric* instr = 4737 New<HLoadGlobalGeneric>(global_object, 4738 variable->name(), 4739 ast_context()->is_for_typeof()); 4740 return ast_context()->ReturnInstruction(instr, expr->id()); 4741 } 4742 } 4743 4744 case Variable::PARAMETER: 4745 case Variable::LOCAL: { 4746 HValue* value = LookupAndMakeLive(variable); 4747 if (value == graph()->GetConstantHole()) { 4748 ASSERT(IsDeclaredVariableMode(variable->mode()) && 4749 variable->mode() != VAR); 4750 return Bailout(kReferenceToUninitializedVariable); 4751 } 4752 return ast_context()->ReturnValue(value); 4753 } 4754 4755 case Variable::CONTEXT: { 4756 HValue* context = BuildContextChainWalk(variable); 4757 HLoadContextSlot* instr = new(zone()) HLoadContextSlot(context, variable); 4758 return ast_context()->ReturnInstruction(instr, expr->id()); 4759 } 4760 4761 case Variable::LOOKUP: 4762 return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup); 4763 } 4764 } 4765 4766 4767 void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) { 4768 ASSERT(!HasStackOverflow()); 4769 ASSERT(current_block() != NULL); 4770 ASSERT(current_block()->HasPredecessor()); 4771 HConstant* instr = New<HConstant>(expr->value()); 4772 return ast_context()->ReturnInstruction(instr, expr->id()); 4773 } 4774 4775 4776 void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) { 4777 ASSERT(!HasStackOverflow()); 4778 ASSERT(current_block() != NULL); 4779 ASSERT(current_block()->HasPredecessor()); 4780 Handle<JSFunction> closure = function_state()->compilation_info()->closure(); 4781 Handle<FixedArray> literals(closure->literals()); 4782 HRegExpLiteral* instr = New<HRegExpLiteral>(literals, 4783 expr->pattern(), 4784 expr->flags(), 4785 expr->literal_index()); 4786 return ast_context()->ReturnInstruction(instr, expr->id()); 4787 } 4788 4789 4790 static bool CanInlinePropertyAccess(Map* type) { 4791 return type->IsJSObjectMap() && 4792 !type->is_dictionary_map() && 4793 !type->has_named_interceptor(); 4794 } 4795 4796 4797 static void LookupInPrototypes(Handle<Map> map, 4798 Handle<String> name, 4799 LookupResult* lookup) { 4800 while (map->prototype()->IsJSObject()) { 4801 Handle<JSObject> 
holder(JSObject::cast(map->prototype()));
4802 map = Handle<Map>(holder->map());
4803 if (!CanInlinePropertyAccess(*map)) break;
4804 map->LookupDescriptor(*holder, *name, lookup);
4805 if (lookup->IsFound()) return;
4806 }
4807 lookup->NotFound();
4808 }
4809
4810
4811 // Tries to find a JavaScript accessor of the given name in the prototype
4812 // chain starting at the given map. Returns true iff there is one, and passes
4813 // back the corresponding AccessorPair plus its holder (which is null when the
4814 // accessor is found directly in the given map).
4815 static bool LookupAccessorPair(Handle<Map> map,
4816 Handle<String> name,
4817 Handle<AccessorPair>* accessors,
4818 Handle<JSObject>* holder) {
4819 Isolate* isolate = map->GetIsolate();
4820 LookupResult lookup(isolate);
4821
4822 // Check for a JavaScript accessor directly in the map.
4823 map->LookupDescriptor(NULL, *name, &lookup);
4824 if (lookup.IsPropertyCallbacks()) {
4825 Handle<Object> callback(lookup.GetValueFromMap(*map), isolate);
4826 if (!callback->IsAccessorPair()) return false;
4827 *accessors = Handle<AccessorPair>::cast(callback);
4828 *holder = Handle<JSObject>();
4829 return true;
4830 }
4831
4832 // Everything else, e.g. a field, can't be an accessor call.
4833 if (lookup.IsFound()) return false;
4834
4835 // Check for a JavaScript accessor somewhere in the proto chain.
4836 LookupInPrototypes(map, name, &lookup);
4837 if (lookup.IsPropertyCallbacks()) {
4838 Handle<Object> callback(lookup.GetValue(), isolate);
4839 if (!callback->IsAccessorPair()) return false;
4840 *accessors = Handle<AccessorPair>::cast(callback);
4841 *holder = Handle<JSObject>(lookup.holder());
4842 return true;
4843 }
4844
4845 // We haven't found a JavaScript accessor anywhere.
4846 return false;
4847 }
4848
4849
4850 static bool LookupSetter(Handle<Map> map,
4851 Handle<String> name,
4852 Handle<JSFunction>* setter,
4853 Handle<JSObject>* holder) {
4854 Handle<AccessorPair> accessors;
4855 if (LookupAccessorPair(map, name, &accessors, holder) &&
4856 accessors->setter()->IsJSFunction()) {
4857 Handle<JSFunction> func(JSFunction::cast(accessors->setter()));
4858 CallOptimization call_optimization(func);
4859 // TODO(dcarney): temporary hack until crankshaft can handle api calls.
4860 if (call_optimization.is_simple_api_call()) return false;
4861 *setter = func;
4862 return true;
4863 }
4864 return false;
4865 }
4866
4867
4868 // Determines whether the given array or object literal boilerplate satisfies
4869 // all limits to be considered for fast deep-copying, charging every object it
4870 // visits against the depth and property budgets supplied by the caller.
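// The traversal below is recursive (sketch): nested object values reduce
// max_depth by one, and every element or own field consumes one unit of
// *max_properties, so a boilerplate like
//
//   { a: 1, b: { c: [1, 2, 3] } }
//
// qualifies only if its total nesting depth and cumulative property/element
// count stay within those budgets.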
4871 static bool IsFastLiteral(Handle<JSObject> boilerplate, 4872 int max_depth, 4873 int* max_properties) { 4874 if (boilerplate->map()->is_deprecated()) { 4875 Handle<Object> result = JSObject::TryMigrateInstance(boilerplate); 4876 if (result.is_null()) return false; 4877 } 4878 4879 ASSERT(max_depth >= 0 && *max_properties >= 0); 4880 if (max_depth == 0) return false; 4881 4882 Isolate* isolate = boilerplate->GetIsolate(); 4883 Handle<FixedArrayBase> elements(boilerplate->elements()); 4884 if (elements->length() > 0 && 4885 elements->map() != isolate->heap()->fixed_cow_array_map()) { 4886 if (boilerplate->HasFastObjectElements()) { 4887 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements); 4888 int length = elements->length(); 4889 for (int i = 0; i < length; i++) { 4890 if ((*max_properties)-- == 0) return false; 4891 Handle<Object> value(fast_elements->get(i), isolate); 4892 if (value->IsJSObject()) { 4893 Handle<JSObject> value_object = Handle<JSObject>::cast(value); 4894 if (!IsFastLiteral(value_object, 4895 max_depth - 1, 4896 max_properties)) { 4897 return false; 4898 } 4899 } 4900 } 4901 } else if (!boilerplate->HasFastDoubleElements()) { 4902 return false; 4903 } 4904 } 4905 4906 Handle<FixedArray> properties(boilerplate->properties()); 4907 if (properties->length() > 0) { 4908 return false; 4909 } else { 4910 Handle<DescriptorArray> descriptors( 4911 boilerplate->map()->instance_descriptors()); 4912 int limit = boilerplate->map()->NumberOfOwnDescriptors(); 4913 for (int i = 0; i < limit; i++) { 4914 PropertyDetails details = descriptors->GetDetails(i); 4915 if (details.type() != FIELD) continue; 4916 int index = descriptors->GetFieldIndex(i); 4917 if ((*max_properties)-- == 0) return false; 4918 Handle<Object> value(boilerplate->InObjectPropertyAt(index), isolate); 4919 if (value->IsJSObject()) { 4920 Handle<JSObject> value_object = Handle<JSObject>::cast(value); 4921 if (!IsFastLiteral(value_object, 4922 max_depth - 1, 4923 max_properties)) { 4924 return false; 4925 } 4926 } 4927 } 4928 } 4929 return true; 4930 } 4931 4932 4933 void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) { 4934 ASSERT(!HasStackOverflow()); 4935 ASSERT(current_block() != NULL); 4936 ASSERT(current_block()->HasPredecessor()); 4937 expr->BuildConstantProperties(isolate()); 4938 Handle<JSFunction> closure = function_state()->compilation_info()->closure(); 4939 HInstruction* literal; 4940 4941 // Check whether to use fast or slow deep-copying for boilerplate. 
4942 int max_properties = kMaxFastLiteralProperties; 4943 Handle<Object> literals_cell(closure->literals()->get(expr->literal_index()), 4944 isolate()); 4945 Handle<AllocationSite> site; 4946 Handle<JSObject> boilerplate; 4947 if (!literals_cell->IsUndefined()) { 4948 // Retrieve the boilerplate 4949 site = Handle<AllocationSite>::cast(literals_cell); 4950 boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()), 4951 isolate()); 4952 } 4953 4954 if (!boilerplate.is_null() && 4955 IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) { 4956 AllocationSiteUsageContext usage_context(isolate(), site, false); 4957 usage_context.EnterNewScope(); 4958 literal = BuildFastLiteral(boilerplate, &usage_context); 4959 usage_context.ExitScope(site, boilerplate); 4960 } else { 4961 NoObservableSideEffectsScope no_effects(this); 4962 Handle<FixedArray> closure_literals(closure->literals(), isolate()); 4963 Handle<FixedArray> constant_properties = expr->constant_properties(); 4964 int literal_index = expr->literal_index(); 4965 int flags = expr->fast_elements() 4966 ? ObjectLiteral::kFastElements : ObjectLiteral::kNoFlags; 4967 flags |= expr->has_function() 4968 ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags; 4969 4970 Add<HPushArgument>(Add<HConstant>(closure_literals)); 4971 Add<HPushArgument>(Add<HConstant>(literal_index)); 4972 Add<HPushArgument>(Add<HConstant>(constant_properties)); 4973 Add<HPushArgument>(Add<HConstant>(flags)); 4974 4975 // TODO(mvstanton): Add a flag to turn off creation of any 4976 // AllocationMementos for this call: we are in crankshaft and should have 4977 // learned enough about transition behavior to stop emitting mementos. 4978 Runtime::FunctionId function_id = Runtime::kCreateObjectLiteral; 4979 literal = Add<HCallRuntime>(isolate()->factory()->empty_string(), 4980 Runtime::FunctionForId(function_id), 4981 4); 4982 } 4983 4984 // The object is expected in the bailout environment during computation 4985 // of the property values and is the value of the entire expression. 4986 Push(literal); 4987 4988 expr->CalculateEmitStore(zone()); 4989 4990 for (int i = 0; i < expr->properties()->length(); i++) { 4991 ObjectLiteral::Property* property = expr->properties()->at(i); 4992 if (property->IsCompileTimeValue()) continue; 4993 4994 Literal* key = property->key(); 4995 Expression* value = property->value(); 4996 4997 switch (property->kind()) { 4998 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 4999 ASSERT(!CompileTimeValue::IsCompileTimeValue(value)); 5000 // Fall through. 5001 case ObjectLiteral::Property::COMPUTED: 5002 if (key->value()->IsInternalizedString()) { 5003 if (property->emit_store()) { 5004 CHECK_ALIVE(VisitForValue(value)); 5005 HValue* value = Pop(); 5006 Handle<Map> map = property->GetReceiverType(); 5007 Handle<String> name = property->key()->AsPropertyName(); 5008 HInstruction* store; 5009 if (map.is_null()) { 5010 // If we don't know the monomorphic type, do a generic store. 
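// (Roughly speaking, a generic store goes through the store IC machinery at
// runtime, while the monomorphic path below can be specialized against the
// known receiver map.)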


void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  expr->BuildConstantElements(isolate());
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  HInstruction* literal;

  Handle<AllocationSite> site;
  Handle<FixedArray> literals(environment()->closure()->literals(), isolate());
  bool uninitialized = false;
  Handle<Object> literals_cell(literals->get(expr->literal_index()),
                               isolate());
  Handle<JSObject> boilerplate_object;
  if (literals_cell->IsUndefined()) {
    uninitialized = true;
    Handle<Object> raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate(
        isolate(), literals, expr->constant_elements());
    if (raw_boilerplate.is_null()) {
      return Bailout(kArrayBoilerplateCreationFailed);
    }

    boilerplate_object = Handle<JSObject>::cast(raw_boilerplate);
    AllocationSiteCreationContext creation_context(isolate());
    site = creation_context.EnterNewScope();
    if (JSObject::DeepWalk(boilerplate_object, &creation_context).is_null()) {
      return Bailout(kArrayBoilerplateCreationFailed);
    }
    creation_context.ExitScope(site, boilerplate_object);
    literals->set(expr->literal_index(), *site);

    if (boilerplate_object->elements()->map() ==
        isolate()->heap()->fixed_cow_array_map()) {
      isolate()->counters()->cow_arrays_created_runtime()->Increment();
    }
  } else {
    ASSERT(literals_cell->IsAllocationSite());
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate_object = Handle<JSObject>(
        JSObject::cast(site->transition_info()), isolate());
  }

  ASSERT(!boilerplate_object.is_null());
  ASSERT(site->SitePointsToLiteral());

  ElementsKind boilerplate_elements_kind =
      boilerplate_object->GetElementsKind();

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  if (IsFastLiteral(boilerplate_object,
                    kMaxFastLiteralDepth,
                    &max_properties)) {
    AllocationSiteUsageContext usage_context(isolate(), site, false);
    usage_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate_object, &usage_context);
    usage_context.ExitScope(site, boilerplate_object);
  } else {
    NoObservableSideEffectsScope no_effects(this);
    // Boilerplate already exists and constant elements are never accessed,
    // pass an empty fixed array to the runtime function instead.
    Handle<FixedArray> constants = isolate()->factory()->empty_fixed_array();
    int literal_index = expr->literal_index();
    int flags = expr->depth() == 1
        ? ArrayLiteral::kShallowElements
        : ArrayLiteral::kNoFlags;
    flags |= ArrayLiteral::kDisableMementos;

    Add<HPushArgument>(Add<HConstant>(literals));
    Add<HPushArgument>(Add<HConstant>(literal_index));
    Add<HPushArgument>(Add<HConstant>(constants));
    Add<HPushArgument>(Add<HConstant>(flags));

    // TODO(mvstanton): Consider a flag to turn off creation of any
    // AllocationMementos for this call: we are in crankshaft and should have
    // learned enough about transition behavior to stop emitting mementos.
    Runtime::FunctionId function_id = Runtime::kCreateArrayLiteral;
    literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
                                Runtime::FunctionForId(function_id),
                                4);

    // De-opt if elements kind changed from boilerplate_elements_kind.
    Handle<Map> map = Handle<Map>(boilerplate_object->map(), isolate());
    literal = Add<HCheckMaps>(literal, map, top_info());
  }

  // The array is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);
  // The literal index is on the stack, too.
  Push(Add<HConstant>(expr->literal_index()));

  HInstruction* elements = NULL;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    CHECK_ALIVE(VisitForValue(subexpr));
    HValue* value = Pop();
    if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);

    elements = AddLoadElements(literal);

    HValue* key = Add<HConstant>(i);

    switch (boilerplate_elements_kind) {
      case FAST_SMI_ELEMENTS:
      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_HOLEY_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_HOLEY_DOUBLE_ELEMENTS: {
        HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value,
                                              boilerplate_elements_kind);
        instr->SetUninitialized(uninitialized);
        break;
      }
      default:
        UNREACHABLE();
        break;
    }

    Add<HSimulate>(expr->GetIdForElement(i));
  }

  Drop(1);  // array literal index
  return ast_context()->ReturnValue(Pop());
}
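
// Informal example of the map check added on the slow path above: for
//
//   var a = [1, 2, 3];  // boilerplate starts as FAST_SMI_ELEMENTS
//
// the code below is specialized on the boilerplate's elements kind. If the
// literal produced at runtime has a different map (say, the site has since
// transitioned to holding doubles or objects), the HCheckMaps fails and we
// deoptimize rather than run code specialized for the stale kind.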


HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
                                                Handle<Map> map) {
  BuildCheckHeapObject(object);
  return Add<HCheckMaps>(object, map, top_info());
}


HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
    HValue* checked_object,
    Handle<String> name,
    HValue* value,
    Handle<Map> map,
    LookupResult* lookup) {
  ASSERT(lookup->IsFound());
  // If the property does not exist yet, we have to check that it wasn't made
  // read-only or turned into a setter by modifications on the prototype
  // chain in the meantime.
  if (!lookup->IsProperty() && map->prototype()->IsJSReceiver()) {
    Object* proto = map->prototype();
    // First check that the prototype chain isn't affected already.
    LookupResult proto_result(isolate());
    proto->Lookup(*name, &proto_result);
    if (proto_result.IsProperty()) {
      // If the inherited property could induce readonly-ness, bail out.
      if (proto_result.IsReadOnly() || !proto_result.IsCacheable()) {
        Bailout(kImproperObjectOnPrototypeChainForStore);
        return NULL;
      }
      // We only need to check up to the preexisting property.
      proto = proto_result.holder();
    } else {
      // Otherwise, find the top prototype.
      while (proto->GetPrototype(isolate())->IsJSObject()) {
        proto = proto->GetPrototype(isolate());
      }
      ASSERT(proto->GetPrototype(isolate())->IsNull());
    }
    ASSERT(proto->IsJSObject());
    BuildCheckPrototypeMaps(
        Handle<JSObject>(JSObject::cast(map->prototype())),
        Handle<JSObject>(JSObject::cast(proto)));
  }

  HObjectAccess field_access = HObjectAccess::ForField(map, lookup, name);
  bool transition_to_field = lookup->IsTransitionToField();

  HStoreNamedField* instr;
  if (FLAG_track_double_fields && field_access.representation().IsDouble()) {
    HObjectAccess heap_number_access =
        field_access.WithRepresentation(Representation::Tagged());
    if (transition_to_field) {
      // The store requires a mutable HeapNumber to be allocated.
      NoObservableSideEffectsScope no_side_effects(this);
      HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);
      HInstruction* heap_number = Add<HAllocate>(heap_number_size,
          HType::HeapNumber(), isolate()->heap()->GetPretenureMode(),
          HEAP_NUMBER_TYPE);
      AddStoreMapConstant(heap_number,
                          isolate()->factory()->heap_number_map());
      Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                            value);
      instr = New<HStoreNamedField>(checked_object->ActualValue(),
                                    heap_number_access,
                                    heap_number);
    } else {
      // Already holds a HeapNumber; load the box and write its value field.
      HInstruction* heap_number = Add<HLoadNamedField>(checked_object,
                                                       heap_number_access);
      heap_number->set_type(HType::HeapNumber());
      instr = New<HStoreNamedField>(heap_number,
                                    HObjectAccess::ForHeapNumberValue(),
                                    value);
    }
  } else {
    // This is a normal store.
    instr = New<HStoreNamedField>(checked_object->ActualValue(),
                                  field_access,
                                  value);
  }

  if (transition_to_field) {
    Handle<Map> transition(lookup->GetTransitionTarget());
    HConstant* transition_constant = Add<HConstant>(transition);
    instr->SetTransition(transition_constant, top_info());
    // TODO(fschneider): Record the new map type of the object in the IR to
    // enable elimination of redundant checks after the transition store.
    instr->SetGVNFlag(kChangesMaps);
  }
  return instr;
}
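
// A rough sketch of the two double-store shapes built above, for a store
// like o.x = 1.5 where x has double representation:
//
//   transition to field:  box = AllocateHeapNumber(); box.value = 1.5;
//                         o.x = box;                 // install the new box
//   existing field:       box = o.x; box.value = 1.5;  // reuse the old box
//
// Reusing the mutable HeapNumber box avoids an allocation on every store.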


HInstruction* HOptimizedGraphBuilder::BuildStoreNamedGeneric(
    HValue* object,
    Handle<String> name,
    HValue* value) {
  return New<HStoreNamedGeneric>(
      object,
      name,
      value,
      function_strict_mode_flag());
}


// Sets the lookup result and returns true if the load/store can be inlined.
static bool ComputeStoreField(Handle<Map> type,
                              Handle<String> name,
                              LookupResult* lookup,
                              bool lookup_transition = true) {
  ASSERT(!type->is_observed());
  if (!CanInlinePropertyAccess(*type)) {
    lookup->NotFound();
    return false;
  }
  // If we directly find a field, the access can be inlined.
  type->LookupDescriptor(NULL, *name, lookup);
  if (lookup->IsField()) return true;

  if (!lookup_transition) return false;

  type->LookupTransition(NULL, *name, lookup);
  return lookup->IsTransitionToField() &&
      (type->unused_property_fields() > 0);
}


HInstruction* HOptimizedGraphBuilder::BuildStoreNamedMonomorphic(
    HValue* object,
    Handle<String> name,
    HValue* value,
    Handle<Map> map) {
  // Handle a store to a known field.
  LookupResult lookup(isolate());
  if (ComputeStoreField(map, name, &lookup)) {
    HCheckMaps* checked_object = AddCheckMap(object, map);
    return BuildStoreNamedField(checked_object, name, value, map, &lookup);
  }

  // No luck, do a generic store.
  return BuildStoreNamedGeneric(object, name, value);
}


bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatibleForLoad(
    PropertyAccessInfo* info) {
  if (!CanInlinePropertyAccess(*map_)) return false;

  if (!LookupDescriptor()) return false;

  if (!lookup_.IsFound()) {
    return (!info->lookup_.IsFound() || info->has_holder()) &&
        map_->prototype() == info->map_->prototype();
  }

  // Mismatch if the other access info found the property in the prototype
  // chain.
  if (info->has_holder()) return false;

  if (lookup_.IsPropertyCallbacks()) {
    return accessor_.is_identical_to(info->accessor_);
  }

  if (lookup_.IsConstant()) {
    return constant_.is_identical_to(info->constant_);
  }

  ASSERT(lookup_.IsField());
  if (!info->lookup_.IsField()) return false;

  Representation r = access_.representation();
  if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
  if (info->access_.offset() != access_.offset()) return false;
  if (info->access_.IsInobject() != access_.IsInobject()) return false;
  info->GeneralizeRepresentation(r);
  return true;
}


bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
  map_->LookupDescriptor(NULL, *name_, &lookup_);
  return LoadResult(map_);
}


bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
  if (lookup_.IsField()) {
    access_ = HObjectAccess::ForField(map, &lookup_, name_);
  } else if (lookup_.IsPropertyCallbacks()) {
    Handle<Object> callback(lookup_.GetValueFromMap(*map), isolate());
    if (!callback->IsAccessorPair()) return false;
    Object* getter = Handle<AccessorPair>::cast(callback)->getter();
    if (!getter->IsJSFunction()) return false;
    Handle<JSFunction> accessor = handle(JSFunction::cast(getter));
    CallOptimization call_optimization(accessor);
    // TODO(dcarney): temporary hack unless crankshaft can handle api calls.
    if (call_optimization.is_simple_api_call()) return false;
    accessor_ = accessor;
  } else if (lookup_.IsConstant()) {
    constant_ = handle(lookup_.GetConstantFromMap(*map), isolate());
  }

  return true;
}


bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
  Handle<Map> map = map_;
  while (map->prototype()->IsJSObject()) {
    holder_ = handle(JSObject::cast(map->prototype()));
    if (holder_->map()->is_deprecated()) {
      JSObject::TryMigrateInstance(holder_);
    }
    map = Handle<Map>(holder_->map());
    if (!CanInlinePropertyAccess(*map)) {
      lookup_.NotFound();
      return false;
    }
    map->LookupDescriptor(*holder_, *name_, &lookup_);
    if (lookup_.IsFound()) return LoadResult(map);
  }
  lookup_.NotFound();
  return true;
}


bool HOptimizedGraphBuilder::PropertyAccessInfo::CanLoadMonomorphic() {
  if (!CanInlinePropertyAccess(*map_)) return IsStringLength();
  if (IsJSObjectFieldAccessor()) return true;
  if (!LookupDescriptor()) return false;
  if (lookup_.IsFound()) return true;
  return LookupInPrototypes();
}
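
// Informal example of compatibility: receivers shaped like {a, b} and
// {a, c} both keep "a" in the first in-object field, so a load of "a" over
// both maps can be emitted as a single field load provided offset,
// in-objectness, and representation agree (see CanLoadAsMonomorphic below).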


bool HOptimizedGraphBuilder::PropertyAccessInfo::CanLoadAsMonomorphic(
    SmallMapList* types) {
  ASSERT(map_.is_identical_to(types->first()));
  if (!CanLoadMonomorphic()) return false;
  if (types->length() > kMaxLoadPolymorphism) return false;

  if (IsStringLength()) {
    for (int i = 1; i < types->length(); ++i) {
      if (types->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
    }
    return true;
  }

  if (IsArrayLength()) {
    bool is_fast = IsFastElementsKind(map_->elements_kind());
    for (int i = 1; i < types->length(); ++i) {
      Handle<Map> test_map = types->at(i);
      if (test_map->instance_type() != JS_ARRAY_TYPE) return false;
      if (IsFastElementsKind(test_map->elements_kind()) != is_fast) {
        return false;
      }
    }
    return true;
  }

  if (IsJSObjectFieldAccessor()) {
    InstanceType instance_type = map_->instance_type();
    for (int i = 1; i < types->length(); ++i) {
      if (types->at(i)->instance_type() != instance_type) return false;
    }
    return true;
  }

  for (int i = 1; i < types->length(); ++i) {
    PropertyAccessInfo test_info(isolate(), types->at(i), name_);
    if (!test_info.IsCompatibleForLoad(this)) return false;
  }

  return true;
}


HInstruction* HOptimizedGraphBuilder::BuildLoadMonomorphic(
    PropertyAccessInfo* info,
    HValue* object,
    HInstruction* checked_object,
    BailoutId ast_id,
    BailoutId return_id,
    bool can_inline_accessor) {

  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
  if (info->GetJSObjectFieldAccess(&access)) {
    return New<HLoadNamedField>(checked_object, access);
  }

  HValue* checked_holder = checked_object;
  if (info->has_holder()) {
    Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
    checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
  }

  if (!info->lookup()->IsFound()) return graph()->GetConstantUndefined();

  if (info->lookup()->IsField()) {
    return BuildLoadNamedField(checked_holder, info->access());
  }

  if (info->lookup()->IsPropertyCallbacks()) {
    Push(checked_object);
    if (FLAG_inline_accessors &&
        can_inline_accessor &&
        TryInlineGetter(info->accessor(), ast_id, return_id)) {
      return NULL;
    }
    Add<HPushArgument>(Pop());
    return New<HCallConstantFunction>(info->accessor(), 1);
  }

  ASSERT(info->lookup()->IsConstant());
  return New<HConstant>(info->constant());
}
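
// Note for callers (restating the contract visible above): a NULL result
// means the getter was inlined, so the value is produced by the inlined
// graph (or inlining hit a stack overflow, which callers check separately);
// any non-NULL result is an instruction that may still need to be linked.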


void HOptimizedGraphBuilder::HandlePolymorphicLoadNamedField(
    BailoutId ast_id,
    BailoutId return_id,
    HValue* object,
    SmallMapList* types,
    Handle<String> name) {
  // Something did not match; must use a polymorphic load.
  int count = 0;
  HBasicBlock* join = NULL;
  for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
    PropertyAccessInfo info(isolate(), types->at(i), name);
    if (info.CanLoadMonomorphic()) {
      if (count == 0) {
        BuildCheckHeapObject(object);
        join = graph()->CreateBasicBlock();
      }
      ++count;
      HBasicBlock* if_true = graph()->CreateBasicBlock();
      HBasicBlock* if_false = graph()->CreateBasicBlock();
      HCompareMap* compare = New<HCompareMap>(
          object, info.map(), if_true, if_false);
      FinishCurrentBlock(compare);

      set_current_block(if_true);

      HInstruction* load = BuildLoadMonomorphic(
          &info, object, compare, ast_id, return_id,
          FLAG_polymorphic_inlining);
      if (load == NULL) {
        if (HasStackOverflow()) return;
      } else {
        if (!load->IsLinked()) {
          AddInstruction(load);
        }
        if (!ast_context()->IsEffect()) Push(load);
      }

      if (current_block() != NULL) Goto(join);
      set_current_block(if_false);
    }
  }

  // Finish up. Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen. Otherwise
  // use a generic IC.
  if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
    // Because the deopt may be the only path in the polymorphic load, make
    // sure that the environment stack matches the depth on deopt that it
    // otherwise would have had after a successful load.
    if (!ast_context()->IsEffect()) Push(graph()->GetConstant0());
    FinishExitWithHardDeoptimization("Unknown map in polymorphic load", join);
  } else {
    HInstruction* load = Add<HLoadNamedGeneric>(object, name);
    if (!ast_context()->IsEffect()) Push(load);

    if (join != NULL) {
      Goto(join);
    } else {
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
      return;
    }
  }

  ASSERT(join != NULL);
  join->SetJoinId(ast_id);
  set_current_block(join);
  if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
}


bool HOptimizedGraphBuilder::TryStorePolymorphicAsMonomorphic(
    BailoutId assignment_id,
    HValue* object,
    HValue* value,
    SmallMapList* types,
    Handle<String> name) {
  // Use monomorphic store if property lookup results in the same field index
  // for all maps. Requires special map check on the set of all handled maps.
  if (types->length() > kMaxStorePolymorphism) return false;

  LookupResult lookup(isolate());
  int count;
  Representation representation = Representation::None();
  HObjectAccess access = HObjectAccess::ForMap();  // initial value unused.
  for (count = 0; count < types->length(); ++count) {
    Handle<Map> map = types->at(count);
    // Pass false to ignore transitions.
    if (!ComputeStoreField(map, name, &lookup, false)) break;
    ASSERT(!map->is_observed());

    HObjectAccess new_access = HObjectAccess::ForField(map, &lookup, name);
    Representation new_representation = new_access.representation();

    if (count == 0) {
      // First time through the loop; set access and representation.
      access = new_access;
      representation = new_representation;
    } else if (!representation.IsCompatibleForStore(new_representation)) {
      // Representations did not match.
      break;
    } else if (access.offset() != new_access.offset()) {
      // Offsets did not match.
      break;
    } else if (access.IsInobject() != new_access.IsInobject()) {
      // In-objectness did not match.
      break;
    }
  }

  if (count != types->length()) return false;

  // Everything matched; can use monomorphic store.
  BuildCheckHeapObject(object);
  HCheckMaps* checked_object = Add<HCheckMaps>(object, types);
  HInstruction* store;
  CHECK_ALIVE_OR_RETURN(
      store = BuildStoreNamedField(
          checked_object, name, value, types->at(count - 1), &lookup),
      true);
  if (!ast_context()->IsEffect()) Push(value);
  AddInstruction(store);
  Add<HSimulate>(assignment_id);
  if (!ast_context()->IsEffect()) Drop(1);
  ast_context()->ReturnValue(value);
  return true;
}
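
// Informal illustration of the "same field index" condition above: stores
// to o.a, where o is sometimes shaped {a, b} and sometimes {a, c}, can
// share one HStoreNamedField as long as "a" lives at the same offset with
// a compatible representation in every map; a single HCheckMaps over the
// whole map set then guards the store.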


void HOptimizedGraphBuilder::HandlePolymorphicStoreNamedField(
    BailoutId assignment_id,
    HValue* object,
    HValue* value,
    SmallMapList* types,
    Handle<String> name) {
  if (TryStorePolymorphicAsMonomorphic(
          assignment_id, object, value, types, name)) {
    return;
  }

  // TODO(ager): We should recognize when the prototype chains for different
  // maps are identical. In that case we can avoid repeatedly generating the
  // same prototype map checks.
  int count = 0;
  HBasicBlock* join = NULL;
  for (int i = 0; i < types->length() && count < kMaxStorePolymorphism; ++i) {
    Handle<Map> map = types->at(i);
    LookupResult lookup(isolate());
    if (ComputeStoreField(map, name, &lookup)) {
      if (count == 0) {
        BuildCheckHeapObject(object);
        join = graph()->CreateBasicBlock();
      }
      ++count;
      HBasicBlock* if_true = graph()->CreateBasicBlock();
      HBasicBlock* if_false = graph()->CreateBasicBlock();
      HCompareMap* compare = New<HCompareMap>(object, map, if_true, if_false);
      FinishCurrentBlock(compare);

      set_current_block(if_true);
      HInstruction* instr;
      CHECK_ALIVE(instr = BuildStoreNamedField(
          compare, name, value, map, &lookup));
      // Goto will add the HSimulate for the store.
      AddInstruction(instr);
      if (!ast_context()->IsEffect()) Push(value);
      Goto(join);

      set_current_block(if_false);
    }
  }

  // Finish up. Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen. Otherwise
  // use a generic IC.
  if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
    FinishExitWithHardDeoptimization("Unknown map in polymorphic store", join);
  } else {
    HInstruction* instr = BuildStoreNamedGeneric(object, name, value);
    AddInstruction(instr);

    if (join != NULL) {
      if (!ast_context()->IsEffect()) {
        Push(value);
      }
      Goto(join);
    } else {
      // The HSimulate for the store should not see the stored value in
      // effect contexts (it is not materialized at expr->id() in the
      // unoptimized code).
      if (instr->HasObservableSideEffects()) {
        if (ast_context()->IsEffect()) {
          Add<HSimulate>(assignment_id, REMOVABLE_SIMULATE);
        } else {
          Push(value);
          Add<HSimulate>(assignment_id, REMOVABLE_SIMULATE);
          Drop(1);
        }
      }
      return ast_context()->ReturnValue(value);
    }
  }

  ASSERT(join != NULL);
  join->SetJoinId(assignment_id);
  set_current_block(join);
  if (!ast_context()->IsEffect()) {
    ast_context()->ReturnValue(Pop());
  }
}


static bool ComputeReceiverTypes(Expression* expr,
                                 HValue* receiver,
                                 SmallMapList** t) {
  SmallMapList* types = expr->GetReceiverTypes();
  *t = types;
  bool monomorphic = expr->IsMonomorphic();
  if (types != NULL && receiver->HasMonomorphicJSObjectType()) {
    Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
    types->FilterForPossibleTransitions(root_map);
    monomorphic = types->length() == 1;
  }
  return monomorphic && CanInlinePropertyAccess(*types->first());
}
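
// Informal note on the filtering above (an added gloss, not a spec): when
// the receiver is known to have a single JSObject map, recorded feedback
// maps that cannot be transition-reachable from that map's root are stale
// and can be dropped; if exactly one candidate survives, the access is
// effectively monomorphic even though the feedback looked polymorphic.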


void HOptimizedGraphBuilder::BuildStore(Expression* expr,
                                        Property* prop,
                                        BailoutId ast_id,
                                        BailoutId return_id,
                                        bool is_uninitialized) {
  HValue* value = environment()->ExpressionStackAt(0);

  if (!prop->key()->IsPropertyName()) {
    // Keyed store.
    HValue* key = environment()->ExpressionStackAt(1);
    HValue* object = environment()->ExpressionStackAt(2);
    bool has_side_effects = false;
    HandleKeyedElementAccess(object, key, value, expr,
                             true,  // is_store
                             &has_side_effects);
    Drop(3);
    Push(value);
    Add<HSimulate>(return_id, REMOVABLE_SIMULATE);
    return ast_context()->ReturnValue(Pop());
  }

  // Named store.
  HValue* object = environment()->ExpressionStackAt(1);

  if (is_uninitialized) {
    Add<HDeoptimize>("Insufficient type feedback for property assignment",
                     Deoptimizer::SOFT);
  }

  Literal* key = prop->key()->AsLiteral();
  Handle<String> name = Handle<String>::cast(key->value());
  ASSERT(!name.is_null());

  HInstruction* instr = NULL;

  SmallMapList* types;
  bool monomorphic = ComputeReceiverTypes(expr, object, &types);

  if (monomorphic) {
    Handle<Map> map = types->first();
    Handle<JSFunction> setter;
    Handle<JSObject> holder;
    if (LookupSetter(map, name, &setter, &holder)) {
      AddCheckConstantFunction(holder, object, map);
      if (FLAG_inline_accessors &&
          TryInlineSetter(setter, ast_id, return_id, value)) {
        return;
      }
      Drop(2);
      Add<HPushArgument>(object);
      Add<HPushArgument>(value);
      instr = New<HCallConstantFunction>(setter, 2);
    } else {
      Drop(2);
      CHECK_ALIVE(instr = BuildStoreNamedMonomorphic(object,
                                                     name,
                                                     value,
                                                     map));
    }
  } else if (types != NULL && types->length() > 1) {
    Drop(2);
    return HandlePolymorphicStoreNamedField(ast_id, object, value, types,
                                            name);
  } else {
    Drop(2);
    instr = BuildStoreNamedGeneric(object, name, value);
  }

  if (!ast_context()->IsEffect()) Push(value);
  AddInstruction(instr);
  if (instr->HasObservableSideEffects()) {
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
  if (!ast_context()->IsEffect()) Drop(1);
  return ast_context()->ReturnValue(value);
}


void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  CHECK_ALIVE(VisitForValue(prop->obj()));
  if (!prop->key()->IsPropertyName()) {
    CHECK_ALIVE(VisitForValue(prop->key()));
  }
  CHECK_ALIVE(VisitForValue(expr->value()));
  BuildStore(expr, prop, expr->id(),
             expr->AssignmentId(), expr->IsUninitialized());
}


// Because not every expression has a position and there is no common
// superclass of Assignment and CountOperation, we cannot just pass the
// owning expression instead of position and ast_id separately.
void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
    Variable* var,
    HValue* value,
    BailoutId ast_id) {
  LookupResult lookup(isolate());
  GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, true);
  if (type == kUseCell) {
    Handle<GlobalObject> global(current_info()->global_object());
    Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
    if (cell->type()->IsConstant()) {
      IfBuilder builder(this);
      HValue* constant = Add<HConstant>(cell->type()->AsConstant());
      if (cell->type()->AsConstant()->IsNumber()) {
        builder.If<HCompareNumericAndBranch>(value, constant, Token::EQ);
      } else {
        builder.If<HCompareObjectEqAndBranch>(value, constant);
      }
      builder.Then();
      builder.Else();
      Add<HDeoptimize>("Constant global variable assignment",
                       Deoptimizer::EAGER);
      builder.End();
    }
    HInstruction* instr =
        Add<HStoreGlobalCell>(value, cell, lookup.GetPropertyDetails());
    if (instr->HasObservableSideEffects()) {
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    }
  } else {
    HGlobalObject* global_object = Add<HGlobalObject>();
    HStoreGlobalGeneric* instr =
        Add<HStoreGlobalGeneric>(global_object, var->name(),
                                 value, function_strict_mode_flag());
    USE(instr);
    ASSERT(instr->HasObservableSideEffects());
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}
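
// Informal example of the constant-cell guard above: after
//
//   var LIMIT = 100;  // property cell typed as the constant 100
//
// optimized code may rely on the cell holding 100, so re-assigning the same
// value passes the comparison while "LIMIT = 200" takes the eager deopt
// branch and abandons that assumption.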


void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
  Expression* target = expr->target();
  VariableProxy* proxy = target->AsVariableProxy();
  Property* prop = target->AsProperty();
  ASSERT(proxy == NULL || prop == NULL);

  // We have a second position recorded in the FullCodeGenerator to have
  // type feedback for the binary operation.
  BinaryOperation* operation = expr->binary_operation();

  if (proxy != NULL) {
    Variable* var = proxy->var();
    if (var->mode() == LET) {
      return Bailout(kUnsupportedLetCompoundAssignment);
    }

    CHECK_ALIVE(VisitForValue(operation));

    switch (var->location()) {
      case Variable::UNALLOCATED:
        HandleGlobalVariableAssignment(var,
                                       Top(),
                                       expr->AssignmentId());
        break;

      case Variable::PARAMETER:
      case Variable::LOCAL:
        if (var->mode() == CONST) {
          return Bailout(kUnsupportedConstCompoundAssignment);
        }
        BindIfLive(var, Top());
        break;

      case Variable::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function
        // using the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will be allocated to context slots. We have no
          // direct way to detect that the variable is a parameter so we do
          // a linear search of the parameter variables.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
            }
          }
        }

        HStoreContextSlot::Mode mode;

        switch (var->mode()) {
          case LET:
            mode = HStoreContextSlot::kCheckDeoptimize;
            break;
          case CONST:
            return ast_context()->ReturnValue(Pop());
          case CONST_HARMONY:
            // This case is checked statically so there is no need to
            // perform checks here.
            UNREACHABLE();
          default:
            mode = HStoreContextSlot::kNoCheck;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        break;
      }

      case Variable::LOOKUP:
        return Bailout(kCompoundAssignmentToLookupSlot);
    }
    return ast_context()->ReturnValue(Pop());

  } else if (prop != NULL) {
    CHECK_ALIVE(VisitForValue(prop->obj()));
    HValue* object = Top();
    HValue* key = NULL;
    if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) ||
        prop->IsStringAccess()) {
      CHECK_ALIVE(VisitForValue(prop->key()));
      key = Top();
    }

    CHECK_ALIVE(PushLoad(prop, object, key));

    CHECK_ALIVE(VisitForValue(expr->value()));
    HValue* right = Pop();
    HValue* left = Pop();

    Push(BuildBinaryOperation(operation, left, right));
    BuildStore(expr, prop, expr->id(),
               expr->AssignmentId(), expr->IsUninitialized());
  } else {
    return Bailout(kInvalidLhsInCompoundAssignment);
  }
}
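
// Informal examples of the bailouts above: "let x = 0; x += 1" hits
// kUnsupportedLetCompoundAssignment, and a compound assignment to a
// classic-mode const local hits kUnsupportedConstCompoundAssignment; both
// leave the function to the unoptimized code.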


void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  ASSERT(proxy == NULL || prop == NULL);

  if (expr->is_compound()) {
    HandleCompoundAssignment(expr);
    return;
  }

  if (prop != NULL) {
    HandlePropertyAssignment(expr);
  } else if (proxy != NULL) {
    Variable* var = proxy->var();

    if (var->mode() == CONST) {
      if (expr->op() != Token::INIT_CONST) {
        CHECK_ALIVE(VisitForValue(expr->value()));
        return ast_context()->ReturnValue(Pop());
      }

      if (var->IsStackAllocated()) {
        // We insert a use of the old value to detect unsupported uses of
        // const variables (e.g. initialization inside a loop).
        HValue* old_value = environment()->Lookup(var);
        Add<HUseConst>(old_value);
      }
    } else if (var->mode() == CONST_HARMONY) {
      if (expr->op() != Token::INIT_CONST_HARMONY) {
        return Bailout(kNonInitializerAssignmentToConst);
      }
    }

    if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);

    // Handle the assignment.
    switch (var->location()) {
      case Variable::UNALLOCATED:
        CHECK_ALIVE(VisitForValue(expr->value()));
        HandleGlobalVariableAssignment(var,
                                       Top(),
                                       expr->AssignmentId());
        return ast_context()->ReturnValue(Pop());

      case Variable::PARAMETER:
      case Variable::LOCAL: {
        // Perform an initialization check for let declared variables
        // or parameters.
        if (var->mode() == LET && expr->op() == Token::ASSIGN) {
          HValue* env_value = environment()->Lookup(var);
          if (env_value == graph()->GetConstantHole()) {
            return Bailout(kAssignmentToLetVariableBeforeInitialization);
          }
        }
        // We do not allow the arguments object to occur in a context where
        // it may escape, but assignments to stack-allocated locals are
        // permitted.
        CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
        HValue* value = Pop();
        BindIfLive(var, value);
        return ast_context()->ReturnValue(value);
      }

      case Variable::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function using
        // the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots. We have no direct way
          // to detect that the variable is a parameter.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        CHECK_ALIVE(VisitForValue(expr->value()));
        HStoreContextSlot::Mode mode;
        if (expr->op() == Token::ASSIGN) {
          switch (var->mode()) {
            case LET:
              mode = HStoreContextSlot::kCheckDeoptimize;
              break;
            case CONST:
              return ast_context()->ReturnValue(Pop());
            case CONST_HARMONY:
              // This case is checked statically so there is no need to
              // perform checks here.
              UNREACHABLE();
            default:
              mode = HStoreContextSlot::kNoCheck;
          }
        } else if (expr->op() == Token::INIT_VAR ||
                   expr->op() == Token::INIT_LET ||
                   expr->op() == Token::INIT_CONST_HARMONY) {
          mode = HStoreContextSlot::kNoCheck;
        } else {
          ASSERT(expr->op() == Token::INIT_CONST);

          mode = HStoreContextSlot::kCheckIgnoreAssignment;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        return ast_context()->ReturnValue(Pop());
      }

      case Variable::LOOKUP:
        return Bailout(kAssignmentToLOOKUPVariable);
    }
  } else {
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
}


void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
  // Generators are not optimized, so we should never get here.
  UNREACHABLE();
}


void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // We don't optimize functions with invalid left-hand sides in
  // assignments, count operations, or for-in. Consequently throw can
  // currently only occur in an effect context.
  ASSERT(ast_context()->IsEffect());
  CHECK_ALIVE(VisitForValue(expr->exception()));

  HValue* value = environment()->Pop();
  if (!FLAG_emit_opt_code_positions) SetSourcePosition(expr->position());
  Add<HThrow>(value);
  Add<HSimulate>(expr->id());

  // If the throw definitely exits the function, we can finish with a dummy
  // control flow at this point. This is not the case if the throw is inside
  // an inlined function which may be replaced.
  if (call_context() == NULL) {
    FinishExitCurrentBlock(New<HAbnormalExit>());
  }
}


HLoadNamedField* HGraphBuilder::BuildLoadNamedField(HValue* object,
                                                    HObjectAccess access) {
  if (FLAG_track_double_fields && access.representation().IsDouble()) {
    // Load the heap number box.
    HLoadNamedField* heap_number = Add<HLoadNamedField>(
        object, access.WithRepresentation(Representation::Tagged()));
    heap_number->set_type(HType::HeapNumber());
    // Load the double value from it.
    return New<HLoadNamedField>(
        heap_number, HObjectAccess::ForHeapNumberValue());
  }
  return New<HLoadNamedField>(object, access);
}


HInstruction* HGraphBuilder::AddLoadNamedField(HValue* object,
                                               HObjectAccess access) {
  return AddInstruction(BuildLoadNamedField(object, access));
}


HInstruction* HGraphBuilder::BuildLoadStringLength(HValue* object,
                                                   HValue* checked_string) {
  if (FLAG_fold_constants && object->IsConstant()) {
    HConstant* constant = HConstant::cast(object);
    if (constant->HasStringValue()) {
      return New<HConstant>(constant->StringValue()->length());
    }
  }
  return BuildLoadNamedField(checked_string,
                             HObjectAccess::ForStringLength());
}


HInstruction* HOptimizedGraphBuilder::BuildLoadNamedGeneric(
    HValue* object,
    Handle<String> name,
    Property* expr) {
  if (expr->IsUninitialized()) {
    Add<HDeoptimize>("Insufficient type feedback for generic named load",
                     Deoptimizer::SOFT);
  }
  return New<HLoadNamedGeneric>(object, name);
}


HInstruction* HOptimizedGraphBuilder::BuildLoadKeyedGeneric(HValue* object,
                                                            HValue* key) {
  return New<HLoadKeyedGeneric>(object, key);
}


LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
  // Loads from "stock" fast holey double arrays can elide the hole check.
  LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
  if (*map == isolate()->get_initial_js_array_map(
          FAST_HOLEY_DOUBLE_ELEMENTS) &&
      isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
    Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
    Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
    BuildCheckPrototypeMaps(prototype, object_prototype);
    load_mode = ALLOW_RETURN_HOLE;
    graph()->MarkDependsOnEmptyArrayProtoElements();
  }

  return load_mode;
}
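
// Informal example for the hole-check elision above: for
//
//   var a = [1.5, , 2.5];  // FAST_HOLEY_DOUBLE_ELEMENTS, stock Array map
//
// a[1] may read the hole directly (ALLOW_RETURN_HOLE) because the intact
// Array.prototype chain guarantees no prototype element can shadow it; if
// that chain is later modified, the dependency recorded here invalidates
// the optimized code.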


HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
    HValue* object,
    HValue* key,
    HValue* val,
    HValue* dependency,
    Handle<Map> map,
    bool is_store,
    KeyedAccessStoreMode store_mode) {
  HCheckMaps* checked_object = Add<HCheckMaps>(object, map, top_info(),
                                               dependency);
  if (dependency) {
    checked_object->ClearGVNFlag(kDependsOnElementsKind);
  }

  if (is_store && map->prototype()->IsJSObject()) {
    // Monomorphic stores need a prototype chain check because shape
    // changes could allow callbacks on elements in the chain that
    // aren't compatible with monomorphic keyed stores.
    Handle<JSObject> prototype(JSObject::cast(map->prototype()));
    Object* holder = map->prototype();
    while (holder->GetPrototype(isolate())->IsJSObject()) {
      holder = holder->GetPrototype(isolate());
    }
    ASSERT(holder->GetPrototype(isolate())->IsNull());

    BuildCheckPrototypeMaps(prototype,
                            Handle<JSObject>(JSObject::cast(holder)));
  }

  LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
  return BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      map->instance_type() == JS_ARRAY_TYPE,
      map->elements_kind(), is_store,
      load_mode, store_mode);
}


HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
    HValue* object,
    HValue* key,
    HValue* val,
    SmallMapList* maps) {
  // For polymorphic loads of similar elements kinds (i.e. all tagged or all
  // double), always use the "worst case" code without a transition. This is
  // much faster than transitioning the elements to the worst case, trading
  // a HTransitionElements for a HCheckMaps, and avoiding mutation of the
  // array.
  bool has_double_maps = false;
  bool has_smi_or_object_maps = false;
  bool has_js_array_access = false;
  bool has_non_js_array_access = false;
  bool has_seen_holey_elements = false;
  Handle<Map> most_general_consolidated_map;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    if (!map->IsJSObjectMap()) return NULL;
    // Don't allow mixing of JSArrays with JSObjects.
    if (map->instance_type() == JS_ARRAY_TYPE) {
      if (has_non_js_array_access) return NULL;
      has_js_array_access = true;
    } else if (has_js_array_access) {
      return NULL;
    } else {
      has_non_js_array_access = true;
    }
    // Don't allow mixed, incompatible elements kinds.
    if (map->has_fast_double_elements()) {
      if (has_smi_or_object_maps) return NULL;
      has_double_maps = true;
    } else if (map->has_fast_smi_or_object_elements()) {
      if (has_double_maps) return NULL;
      has_smi_or_object_maps = true;
    } else {
      return NULL;
    }
    // Remember if we've ever seen holey elements.
    if (IsHoleyElementsKind(map->elements_kind())) {
      has_seen_holey_elements = true;
    }
    // Remember the most general elements kind, the code for its load will
    // properly handle all of the more specific cases.
    if ((i == 0) || IsMoreGeneralElementsKindTransition(
            most_general_consolidated_map->elements_kind(),
            map->elements_kind())) {
      most_general_consolidated_map = map;
    }
  }
  if (!has_double_maps && !has_smi_or_object_maps) return NULL;

  HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
  // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
  // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
  ElementsKind consolidated_elements_kind = has_seen_holey_elements
      ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
      : most_general_consolidated_map->elements_kind();
  HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
      consolidated_elements_kind,
      false, NEVER_RETURN_HOLE, STANDARD_STORE);
  return instr;
}
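
// Informal example of the consolidation above: a load site that has seen
// arrays with FAST_SMI_ELEMENTS and FAST_HOLEY_ELEMENTS maps can be
// compiled as a single FAST_HOLEY_ELEMENTS load behind one HCheckMaps over
// both maps, instead of a map dispatch with one load per map.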


HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
    HValue* object,
    HValue* key,
    HValue* val,
    SmallMapList* maps,
    bool is_store,
    KeyedAccessStoreMode store_mode,
    bool* has_side_effects) {
  *has_side_effects = false;
  BuildCheckHeapObject(object);

  if (!is_store) {
    HInstruction* consolidated_load =
        TryBuildConsolidatedElementLoad(object, key, val, maps);
    if (consolidated_load != NULL) {
      *has_side_effects |= consolidated_load->HasObservableSideEffects();
      return consolidated_load;
    }
  }

  // Elements_kind transition support.
  MapHandleList transition_target(maps->length());
  // Collect possible transition targets.
  MapHandleList possible_transitioned_maps(maps->length());
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    ElementsKind elements_kind = map->elements_kind();
    if (IsFastElementsKind(elements_kind) &&
        elements_kind != GetInitialFastElementsKind()) {
      possible_transitioned_maps.Add(map);
    }
  }
  // Get transition target for each map (NULL == no transition).
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    Handle<Map> transitioned_map =
        map->FindTransitionedMap(&possible_transitioned_maps);
    transition_target.Add(transitioned_map);
  }

  MapHandleList untransitionable_maps(maps->length());
  HTransitionElementsKind* transition = NULL;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    ASSERT(map->IsMap());
    if (!transition_target.at(i).is_null()) {
      ASSERT(Map::IsValidElementsTransition(
          map->elements_kind(),
          transition_target.at(i)->elements_kind()));
      transition = Add<HTransitionElementsKind>(object, map,
                                                transition_target.at(i));
    } else {
      untransitionable_maps.Add(map);
    }
  }

  // If only one map is left after transitioning, handle this case
  // monomorphically.
  ASSERT(untransitionable_maps.length() >= 1);
  if (untransitionable_maps.length() == 1) {
    Handle<Map> untransitionable_map = untransitionable_maps[0];
    HInstruction* instr = NULL;
    if (untransitionable_map->has_slow_elements_kind() ||
        !untransitionable_map->IsJSObjectMap()) {
      instr = AddInstruction(is_store
          ? BuildStoreKeyedGeneric(object, key, val)
          : BuildLoadKeyedGeneric(object, key));
    } else {
      instr = BuildMonomorphicElementAccess(
          object, key, val, transition, untransitionable_map, is_store,
          store_mode);
    }
    *has_side_effects |= instr->HasObservableSideEffects();
    return is_store ? NULL : instr;
  }

  HBasicBlock* join = graph()->CreateBasicBlock();

  for (int i = 0; i < untransitionable_maps.length(); ++i) {
    Handle<Map> map = untransitionable_maps[i];
    if (!map->IsJSObjectMap()) continue;
    ElementsKind elements_kind = map->elements_kind();
    HBasicBlock* this_map = graph()->CreateBasicBlock();
    HBasicBlock* other_map = graph()->CreateBasicBlock();
    HCompareMap* mapcompare =
        New<HCompareMap>(object, map, this_map, other_map);
    FinishCurrentBlock(mapcompare);

    set_current_block(this_map);
    HInstruction* access = NULL;
    if (IsDictionaryElementsKind(elements_kind)) {
      access = is_store
          ? AddInstruction(BuildStoreKeyedGeneric(object, key, val))
          : AddInstruction(BuildLoadKeyedGeneric(object, key));
    } else {
      ASSERT(IsFastElementsKind(elements_kind) ||
             IsExternalArrayElementsKind(elements_kind));
      LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
      // Happily, mapcompare is a checked object.
      access = BuildUncheckedMonomorphicElementAccess(
          mapcompare, key, val,
          map->instance_type() == JS_ARRAY_TYPE,
          elements_kind, is_store,
          load_mode,
          store_mode);
    }
    *has_side_effects |= access->HasObservableSideEffects();
    // The caller will use has_side_effects and add a correct Simulate.
    access->SetFlag(HValue::kHasNoObservableSideEffects);
    if (!is_store) {
      Push(access);
    }
    NoObservableSideEffectsScope scope(this);
    GotoNoSimulate(join);
    set_current_block(other_map);
  }

  // Deopt if none of the cases matched.
  NoObservableSideEffectsScope scope(this);
  FinishExitWithHardDeoptimization("Unknown map in polymorphic element access",
                                   join);
  set_current_block(join);
  return is_store ? NULL : Pop();
}


HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
    HValue* obj,
    HValue* key,
    HValue* val,
    Expression* expr,
    bool is_store,
    bool* has_side_effects) {
  ASSERT(!expr->IsPropertyName());
  HInstruction* instr = NULL;

  SmallMapList* types;
  bool monomorphic = ComputeReceiverTypes(expr, obj, &types);

  bool force_generic = false;
  if (is_store && (monomorphic || (types != NULL && !types->is_empty()))) {
    // Stores can't be mono/polymorphic if their prototype chain has
    // dictionary elements. However a receiver map that has dictionary
    // elements itself should be left to normal mono/poly behavior (the
    // other maps may benefit from highly optimized stores).
    for (int i = 0; i < types->length(); i++) {
      Handle<Map> current_map = types->at(i);
      if (current_map->DictionaryElementsInPrototypeChainOnly()) {
        force_generic = true;
        monomorphic = false;
        break;
      }
    }
  }

  if (monomorphic) {
    Handle<Map> map = types->first();
    if (map->has_slow_elements_kind()) {
      instr = is_store ? BuildStoreKeyedGeneric(obj, key, val)
                       : BuildLoadKeyedGeneric(