1 // Copyright 2013 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
27 28 #include "hydrogen.h" 29 30 #include <algorithm> 31 32 #include "v8.h" 33 #include "codegen.h" 34 #include "full-codegen.h" 35 #include "hashmap.h" 36 #include "hydrogen-bce.h" 37 #include "hydrogen-bch.h" 38 #include "hydrogen-canonicalize.h" 39 #include "hydrogen-dce.h" 40 #include "hydrogen-dehoist.h" 41 #include "hydrogen-deoptimizing-mark.h" 42 #include "hydrogen-environment-liveness.h" 43 #include "hydrogen-escape-analysis.h" 44 #include "hydrogen-infer-representation.h" 45 #include "hydrogen-infer-types.h" 46 #include "hydrogen-gvn.h" 47 #include "hydrogen-mark-deoptimize.h" 48 #include "hydrogen-minus-zero.h" 49 #include "hydrogen-osr.h" 50 #include "hydrogen-range-analysis.h" 51 #include "hydrogen-redundant-phi.h" 52 #include "hydrogen-removable-simulates.h" 53 #include "hydrogen-representation-changes.h" 54 #include "hydrogen-sce.h" 55 #include "hydrogen-uint32-analysis.h" 56 #include "lithium-allocator.h" 57 #include "parser.h" 58 #include "scopeinfo.h" 59 #include "scopes.h" 60 #include "stub-cache.h" 61 #include "typing.h" 62 63 #if V8_TARGET_ARCH_IA32 64 #include "ia32/lithium-codegen-ia32.h" 65 #elif V8_TARGET_ARCH_X64 66 #include "x64/lithium-codegen-x64.h" 67 #elif V8_TARGET_ARCH_ARM 68 #include "arm/lithium-codegen-arm.h" 69 #elif V8_TARGET_ARCH_MIPS 70 #include "mips/lithium-codegen-mips.h" 71 #else 72 #error Unsupported target architecture. 
#endif  // End of target-architecture lithium-codegen include selection.

namespace v8 {
namespace internal {

// Constructs an empty basic block owned by |graph|. All containers are
// zone-allocated in the graph's zone; instructions, phis, predecessors and
// dominator links are filled in later by the graph builder and analyses.
HBasicBlock::HBasicBlock(HGraph* graph)
    : block_id_(graph->GetNextBlockID()),
      graph_(graph),
      phis_(4, graph->zone()),
      first_(NULL),
      last_(NULL),
      end_(NULL),
      loop_information_(NULL),
      predecessors_(2, graph->zone()),
      dominator_(NULL),
      dominated_blocks_(4, graph->zone()),
      last_environment_(NULL),
      argument_count_(-1),
      first_instruction_index_(-1),
      last_instruction_index_(-1),
      deleted_phis_(4, graph->zone()),
      parent_loop_header_(NULL),
      inlined_entry_block_(NULL),
      is_inline_return_target_(false),
      is_deoptimizing_(false),
      dominates_loop_successors_(false),
      is_osr_entry_(false) { }


Isolate* HBasicBlock::isolate() const {
  return graph_->isolate();
}


// Turns this block into a loop header by attaching an HLoopInformation.
void HBasicBlock::AttachLoopInformation() {
  ASSERT(!IsLoopHeader());
  loop_information_ = new(zone()) HLoopInformation(this, zone());
}


// Reverses AttachLoopInformation() when a loop turns out to be degenerate.
// The HLoopInformation is zone-allocated, so dropping the pointer suffices.
void HBasicBlock::DetachLoopInformation() {
  ASSERT(IsLoopHeader());
  loop_information_ = NULL;
}


// Appends |phi| to this block's phi list and claims ownership of it.
void HBasicBlock::AddPhi(HPhi* phi) {
  ASSERT(!IsStartBlock());
  phis_.Add(phi, zone());
  phi->SetBlock(this);
}


// Kills |phi| (removing its uses) and detaches it from this block.
void HBasicBlock::RemovePhi(HPhi* phi) {
  ASSERT(phi->block() == this);
  ASSERT(phis_.Contains(phi));
  phi->Kill();
  phis_.RemoveElement(phi);
  phi->SetBlock(NULL);
}


// Appends |instr| to the end of this block. On the first instruction, an
// HBlockEntry marker is lazily created so every block starts with one.
void HBasicBlock::AddInstruction(HInstruction* instr) {
  ASSERT(!IsStartBlock() || !IsFinished());
  ASSERT(!instr->IsLinked());
  ASSERT(!IsFinished());

  if (first_ == NULL) {
    ASSERT(last_environment() != NULL);
    ASSERT(!last_environment()->ast_id().IsNone());
    HBlockEntry* entry = new(zone()) HBlockEntry();
    entry->InitializeAsFirst(this);
    first_ = last_ = entry;
  }
  instr->InsertAfter(last_);
}


// Creates a new phi in this block. Inside a no-side-effects scope, phis are
// not tied to an environment slot, so the merged index is invalidated.
HPhi* HBasicBlock::AddNewPhi(int merged_index) {
  if (graph()->IsInsideNoSideEffectsScope()) {
    merged_index = HPhi::kInvalidMergedIndex;
  }
  HPhi* phi = new(zone()) HPhi(merged_index, zone());
  AddPhi(phi);
  return phi;
}


// Builds an HSimulate from the current environment's pending pushes/pops and
// assigned variables, then clears that history. The simulate captures the
// state needed to materialize the environment on deoptimization at |ast_id|.
HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
                                       RemovableSimulate removable) {
  ASSERT(HasEnvironment());
  HEnvironment* environment = last_environment();
  ASSERT(ast_id.IsNone() ||
         ast_id == BailoutId::StubEntry() ||
         environment->closure()->shared()->VerifyBailoutId(ast_id));

  int push_count = environment->push_count();
  int pop_count = environment->pop_count();

  HSimulate* instr =
      new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
#ifdef DEBUG
  instr->set_closure(environment->closure());
#endif
  // Order of pushed values: newest (top of stack) first. This allows
  // HSimulate::MergeWith() to easily append additional pushed values
  // that are older (from further down the stack).
  for (int i = 0; i < push_count; ++i) {
    instr->AddPushedValue(environment->ExpressionStackAt(i));
  }
  for (GrowableBitVector::Iterator it(environment->assigned_variables(),
                                      zone());
       !it.Done();
       it.Advance()) {
    int index = it.Current();
    instr->AddAssignedValue(index, environment->Lookup(index));
  }
  environment->ClearHistory();
  return instr;
}


// Terminates this block with control instruction |end| and registers this
// block as a predecessor of each of |end|'s successors.
void HBasicBlock::Finish(HControlInstruction* end) {
  ASSERT(!IsFinished());
  AddInstruction(end);
  end_ = end;
  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    it.Current()->RegisterPredecessor(this);
  }
}


// Finishes this block with an unconditional jump to |block|. If the target is
// an inline-return target, the inlined environment is unwound first;
// optionally a simulate is emitted before the goto.
void HBasicBlock::Goto(HBasicBlock* block,
                       FunctionState* state,
                       bool add_simulate) {
  bool drop_extra = state != NULL &&
      state->inlining_kind() == DROP_EXTRA_ON_RETURN;

  if (block->IsInlineReturnTarget()) {
    AddInstruction(new(zone()) HLeaveInlined());
    UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  }

  if (add_simulate) AddNewSimulate(BailoutId::None());
  HGoto* instr = new(zone()) HGoto(block);
  Finish(instr);
}


// Finishes this block by leaving an inlined function: unwinds the inlined
// environment, pushes the return value, and jumps to the function's
// designated return-target block.
void HBasicBlock::AddLeaveInlined(HValue* return_value,
                                  FunctionState* state) {
  HBasicBlock* target = state->function_return();
  bool drop_extra = state->inlining_kind() == DROP_EXTRA_ON_RETURN;

  ASSERT(target->IsInlineReturnTarget());
  ASSERT(return_value != NULL);
  AddInstruction(new(zone()) HLeaveInlined());
  UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  last_environment()->Push(return_value);
  AddNewSimulate(BailoutId::None());
  HGoto* instr = new(zone()) HGoto(target);
  Finish(instr);
}


// Installs the first environment of a fresh block (before any instruction).
void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
  ASSERT(!HasEnvironment());
  ASSERT(first() == NULL);
  UpdateEnvironment(env);
}


// Replaces the current environment and lets the graph track the largest
// environment seen (used for frame sizing).
void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
  last_environment_ = env;
  graph()->update_maximum_environment_size(env->first_expression_index());
}


// Stamps |ast_id| onto the simulate and environment of every predecessor of
// this join block, so all incoming edges agree on the bailout point.
// Each predecessor is expected to end with a simulate followed by a goto.
void HBasicBlock::SetJoinId(BailoutId ast_id) {
  int length = predecessors_.length();
  ASSERT(length > 0);
  for (int i = 0; i < length; i++) {
    HBasicBlock* predecessor = predecessors_[i];
    ASSERT(predecessor->end()->IsGoto());
    HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
    ASSERT(i != 0 ||
           (predecessor->last_environment()->closure().is_null() ||
            predecessor->last_environment()->closure()->shared()
                ->VerifyBailoutId(ast_id)));
    simulate->set_ast_id(ast_id);
    predecessor->last_environment()->set_ast_id(ast_id);
  }
}


// Returns true if this block strictly dominates |other|, by walking up
// |other|'s dominator chain.
bool HBasicBlock::Dominates(HBasicBlock* other) const {
  HBasicBlock* current = other->dominator();
  while (current != NULL) {
    if (current == this) return true;
    current = current->dominator();
  }
  return false;
}


// Counts enclosing loops: 1 for being a loop header itself, plus one per
// parent loop header up the chain.
int HBasicBlock::LoopNestingDepth() const {
  const HBasicBlock* current = this;
  int result = (current->IsLoopHeader()) ? 1 : 0;
  while (current->parent_loop_header() != NULL) {
    current = current->parent_loop_header();
    result++;
  }
  return result;
}


// Called once a loop's body has been built: assigns the loop-entry bailout id
// to all incoming edges and registers the back edges. A header with a single
// predecessor has no back edges and is demoted to a normal block.
void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
  ASSERT(IsLoopHeader());

  SetJoinId(stmt->EntryId());
  if (predecessors()->length() == 1) {
    // This is a degenerated loop.
    DetachLoopInformation();
    return;
  }

  // Only the first entry into the loop is from outside the loop. All other
  // entries must be back edges.
  for (int i = 1; i < predecessors()->length(); ++i) {
    loop_information()->RegisterBackEdge(predecessors()->at(i));
  }
}


// Records |pred| as a predecessor. For the first predecessor the environment
// is copied; later predecessors either feed the loop-header phis or are
// merged into the existing environment.
void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
  if (HasPredecessor()) {
    // Only loop header blocks can have a predecessor added after
    // instructions have been added to the block (they have phis for all
    // values in the environment, these phis may be eliminated later).
    ASSERT(IsLoopHeader() || first_ == NULL);
    HEnvironment* incoming_env = pred->last_environment();
    if (IsLoopHeader()) {
      ASSERT(phis()->length() == incoming_env->length());
      for (int i = 0; i < phis_.length(); ++i) {
        phis_[i]->AddInput(incoming_env->values()->at(i));
      }
    } else {
      last_environment()->AddIncomingEdge(this, pred->last_environment());
    }
  } else if (!HasEnvironment() && !IsFinished()) {
    ASSERT(!IsLoopHeader());
    SetInitialEnvironment(pred->last_environment()->Copy());
  }

  predecessors_.Add(pred, zone());
}


// Inserts |block| into this block's dominated list, keeping it sorted by
// block id (insertion by linear scan; lists are small).
void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
  ASSERT(!dominated_blocks_.Contains(block));
  // Keep the list of dominated blocks sorted such that if there is two
  // succeeding block in this list, the predecessor is before the successor.
  int index = 0;
  while (index < dominated_blocks_.length() &&
         dominated_blocks_[index]->block_id() < block->block_id()) {
    ++index;
  }
  dominated_blocks_.InsertAt(index, block, zone());
}


// Updates this block's immediate dominator to the common dominator of the
// current dominator and |other|. The intersection walks both chains upward,
// relying on block ids decreasing toward the entry (reverse post order).
void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
  if (dominator_ == NULL) {
    dominator_ = other;
    other->AddDominatedBlock(this);
  } else if (other->dominator() != NULL) {
    HBasicBlock* first = dominator_;
    HBasicBlock* second = other;

    while (first != second) {
      if (first->block_id() > second->block_id()) {
        first = first->dominator();
      } else {
        second = second->dominator();
      }
      ASSERT(first != NULL && second != NULL);
    }

    if (dominator_ != first) {
      ASSERT(dominator_->dominated_blocks_.Contains(this));
      dominator_->dominated_blocks_.RemoveElement(this);
      dominator_ = first;
      first->AddDominatedBlock(this);
    }
  }
}


void HBasicBlock::AssignLoopSuccessorDominators() {
  // Mark blocks that dominate all subsequent reachable blocks inside their
  // loop. Exploit the fact that blocks are sorted in reverse post order. When
  // the loop is visited in increasing block id order, if the number of
  // non-loop-exiting successor edges at the dominator_candidate block doesn't
  // exceed the number of previously encountered predecessor edges, there is no
  // path from the loop header to any block with higher id that doesn't go
  // through the dominator_candidate block. In this case, the
  // dominator_candidate block is guaranteed to dominate all blocks reachable
  // from it with higher ids.
  HBasicBlock* last = loop_information()->GetLastBackEdge();
  int outstanding_successors = 1;  // one edge from the pre-header
  // Header always dominates everything.
  MarkAsLoopSuccessorDominator();
  for (int j = block_id(); j <= last->block_id(); ++j) {
    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
    for (HPredecessorIterator it(dominator_candidate); !it.Done();
         it.Advance()) {
      HBasicBlock* predecessor = it.Current();
      // Don't count back edges.
      if (predecessor->block_id() < dominator_candidate->block_id()) {
        outstanding_successors--;
      }
    }

    // If more successors than predecessors have been seen in the loop up to
    // now, it's not possible to guarantee that the current block dominates
    // all of the blocks with higher IDs. In this case, assume conservatively
    // that those paths through loop that don't go through the current block
    // contain all of the loop's dependencies. Also be careful to record
    // dominator information about the current loop that's being processed,
    // and not nested loops, which will be processed when
    // AssignLoopSuccessorDominators gets called on their header.
    ASSERT(outstanding_successors >= 0);
    HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
    if (outstanding_successors == 0 &&
        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
      dominator_candidate->MarkAsLoopSuccessorDominator();
    }
    HControlInstruction* end = dominator_candidate->end();
    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
      HBasicBlock* successor = it.Current();
      // Only count successors that remain inside the loop and don't loop back
      // to a loop header.
      if (successor->block_id() > dominator_candidate->block_id() &&
          successor->block_id() <= last->block_id()) {
        // Backwards edges must land on loop headers.
        ASSERT(successor->block_id() > dominator_candidate->block_id() ||
               successor->IsLoopHeader());
        outstanding_successors++;
      }
    }
  }
}


// Returns the index of |predecessor| in this block's predecessor list.
// The caller must pass an actual predecessor; otherwise this is a bug.
int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
  for (int i = 0; i < predecessors_.length(); ++i) {
    if (predecessors_[i] == predecessor) return i;
  }
  UNREACHABLE();
  return -1;
}


#ifdef DEBUG
// Debug-only sanity checks for a single block.
void HBasicBlock::Verify() {
  // Check that every block is finished.
  ASSERT(IsFinished());
  ASSERT(block_id() >= 0);

  // Check that the incoming edges are in edge split form.
  if (predecessors_.length() > 1) {
    for (int i = 0; i < predecessors_.length(); ++i) {
      ASSERT(predecessors_[i]->end()->SecondSuccessor() == NULL);
    }
  }
}
#endif


// Records a back edge to this loop and absorbs the blocks reachable from it
// into the loop's block set.
void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
  this->back_edges_.Add(block, block->zone());
  AddBlock(block);
}


// Returns the back edge with the highest block id (the loop's last block in
// reverse post order), or NULL if there are no back edges.
HBasicBlock* HLoopInformation::GetLastBackEdge() const {
  int max_id = -1;
  HBasicBlock* result = NULL;
  for (int i = 0; i < back_edges_.length(); ++i) {
    HBasicBlock* cur = back_edges_[i];
    if (cur->block_id() > max_id) {
      max_id = cur->block_id();
      result = cur;
    }
  }
  return result;
}


// Recursively adds |block| (and, through its predecessors, the rest of the
// loop body) to this loop. Blocks already owned by a nested loop are added
// via that loop's header instead.
void HLoopInformation::AddBlock(HBasicBlock* block) {
  if (block == loop_header()) return;
  if (block->parent_loop_header() == loop_header()) return;
  if (block->parent_loop_header() != NULL) {
    AddBlock(block->parent_loop_header());
  } else {
    block->set_parent_loop_header(loop_header());
    blocks_.Add(block, block->zone());
    for (int i = 0; i < block->predecessors()->length(); ++i) {
      AddBlock(block->predecessors()->at(i));
    }
  }
}


#ifdef DEBUG

// Checks reachability of the blocks in this graph and stores a bit in
// the BitVector "reachable()" for every block that can be reached
// from the start block of the graph. If "dont_visit" is non-null, the given
// block is treated as if it would not be part of the graph. "visited_count()"
// returns the number of reachable blocks.
class ReachabilityAnalyzer BASE_EMBEDDED {
 public:
  ReachabilityAnalyzer(HBasicBlock* entry_block,
                       int block_count,
                       HBasicBlock* dont_visit)
      : visited_count_(0),
        stack_(16, entry_block->zone()),
        reachable_(block_count, entry_block->zone()),
        dont_visit_(dont_visit) {
    PushBlock(entry_block);
    Analyze();
  }

  int visited_count() const { return visited_count_; }
  const BitVector* reachable() const { return &reachable_; }

 private:
  // Pushes |block| for visiting unless it is excluded or already seen.
  void PushBlock(HBasicBlock* block) {
    if (block != NULL && block != dont_visit_ &&
        !reachable_.Contains(block->block_id())) {
      reachable_.Add(block->block_id());
      stack_.Add(block, block->zone());
      visited_count_++;
    }
  }

  // Iterative DFS over successor edges.
  void Analyze() {
    while (!stack_.is_empty()) {
      HControlInstruction* end = stack_.RemoveLast()->end();
      for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
        PushBlock(it.Current());
      }
    }
  }

  int visited_count_;
  ZoneList<HBasicBlock*> stack_;
  BitVector reachable_;
  HBasicBlock* dont_visit_;
};


// Debug-only whole-graph consistency check. With |do_full_verify| it
// additionally checks connectivity and the dominator relation (the latter
// via one ReachabilityAnalyzer per block, which is quadratic — debug only).
void HGraph::Verify(bool do_full_verify) const {
  Heap::RelocationLock relocation_lock(isolate()->heap());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  for (int i = 0; i < blocks_.length(); i++) {
    HBasicBlock* block = blocks_.at(i);

    block->Verify();

    // Check that every block contains at least one node and that only the last
    // node is a control instruction.
    HInstruction* current = block->first();
    ASSERT(current != NULL && current->IsBlockEntry());
    while (current != NULL) {
      ASSERT((current->next() == NULL) == current->IsControlInstruction());
      ASSERT(current->block() == block);
      current->Verify();
      current = current->next();
    }

    // Check that successors are correctly set.
    HBasicBlock* first = block->end()->FirstSuccessor();
    HBasicBlock* second = block->end()->SecondSuccessor();
    ASSERT(second == NULL || first != NULL);

    // Check that the predecessor array is correct.
    if (first != NULL) {
      ASSERT(first->predecessors()->Contains(block));
      if (second != NULL) {
        ASSERT(second->predecessors()->Contains(block));
      }
    }

    // Check that phis have correct arguments.
    for (int j = 0; j < block->phis()->length(); j++) {
      HPhi* phi = block->phis()->at(j);
      phi->Verify();
    }

    // Check that all join blocks have predecessors that end with an
    // unconditional goto and agree on their environment node id.
    if (block->predecessors()->length() >= 2) {
      BailoutId id =
          block->predecessors()->first()->last_environment()->ast_id();
      for (int k = 0; k < block->predecessors()->length(); k++) {
        HBasicBlock* predecessor = block->predecessors()->at(k);
        ASSERT(predecessor->end()->IsGoto());
        ASSERT(predecessor->last_environment()->ast_id() == id);
      }
    }
  }

  // Check special property of first block to have no predecessors.
  ASSERT(blocks_.at(0)->predecessors()->is_empty());

  if (do_full_verify) {
    // Check that the graph is fully connected.
    ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
    ASSERT(analyzer.visited_count() == blocks_.length());

    // Check that entry block dominator is NULL.
    ASSERT(entry_block_->dominator() == NULL);

    // Check dominators.
    for (int i = 0; i < blocks_.length(); ++i) {
      HBasicBlock* block = blocks_.at(i);
      if (block->dominator() == NULL) {
        // Only start block may have no dominator assigned to.
        ASSERT(i == 0);
      } else {
        // Assert that block is unreachable if dominator must not be visited.
        ReachabilityAnalyzer dominator_analyzer(entry_block_,
                                                blocks_.length(),
                                                block->dominator());
        ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
      }
    }
  }
}

#endif


// Returns the cached small-integer constant in |pointer|, creating and
// inserting it after the undefined constant on first use.
HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
                               int32_t value) {
  if (!pointer->is_set()) {
    // Can't pass GetInvalidContext() to HConstant::New, because that will
    // recursively call GetConstant
    HConstant* constant = HConstant::New(zone(), NULL, value);
    constant->InsertAfter(GetConstantUndefined());
    pointer->set(constant);
  }
  return pointer->get();
}


HConstant* HGraph::GetConstant0() {
  return GetConstant(&constant_0_, 0);
}


HConstant* HGraph::GetConstant1() {
  return GetConstant(&constant_1_, 1);
}


HConstant* HGraph::GetConstantMinus1() {
  return GetConstant(&constant_minus1_, -1);
}


// Defines HGraph::GetConstant<Name>() accessors for cached heap-object
// constants (true/false/the-hole/null), each lazily created from the
// corresponding factory/heap root and inserted after the undefined constant.
#define DEFINE_GET_CONSTANT(Name, name, htype, boolean_value) \
HConstant* HGraph::GetConstant##Name() {                      \
  if (!constant_##name##_.is_set()) {                         \
    HConstant* constant = new(zone()) HConstant(              \
        isolate()->factory()->name##_value(),                 \
        UniqueValueId(isolate()->heap()->name##_value()),     \
        Representation::Tagged(),                             \
        htype,                                                \
        false,                                                \
        true,                                                 \
        false,                                                \
        boolean_value);                                       \
    constant->InsertAfter(GetConstantUndefined());            \
    constant_##name##_.set(constant);                         \
  }                                                           \
  return constant_##name##_.get();                            \
}


DEFINE_GET_CONSTANT(True, true, HType::Boolean(), true)
DEFINE_GET_CONSTANT(False, false, HType::Boolean(), false)
DEFINE_GET_CONSTANT(Hole, the_hole, HType::Tagged(), false)
DEFINE_GET_CONSTANT(Null, null, HType::Tagged(), false)


#undef DEFINE_GET_CONSTANT


// Returns a cached sentinel constant used where no valid context is
// available. NOTE(review): 0xFFFFC0C7 appears to be an arbitrary marker
// value — confirm its significance before changing it.
HConstant* HGraph::GetInvalidContext() {
  return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
}


// Returns true if |constant| is one of the graph's canonical cached
// constants (undefined, 0, 1, -1, true, false, the hole, null).
bool HGraph::IsStandardConstant(HConstant* constant) {
  if (constant == GetConstantUndefined()) return true;
  if (constant == GetConstant0()) return true;
  if (constant == GetConstant1()) return true;
  if (constant == GetConstantMinus1()) return true;
  if (constant == GetConstantTrue()) return true;
  if (constant == GetConstantFalse()) return true;
  if (constant == GetConstantHole()) return true;
  if (constant == GetConstantNull()) return true;
  return false;
}


// Starts a fresh if/then/else diamond: both branch entry blocks are created
// up front from copies of the current environment; the compare is emitted
// later (needs_compare_ == true).
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder, int position)
    : builder_(builder),
      position_(position),
      finished_(false),
      deopt_then_(false),
      deopt_else_(false),
      did_then_(false),
      did_else_(false),
      did_and_(false),
      did_or_(false),
      captured_(false),
      needs_compare_(true),
      split_edge_merge_block_(NULL),
      merge_block_(NULL) {
  HEnvironment* env = builder->environment();
  first_true_block_ = builder->CreateBasicBlock(env->Copy());
  last_true_block_ = NULL;
  first_false_block_ = builder->CreateBasicBlock(env->Copy());
}


// Resumes a previously captured if/else continuation: the branch blocks and
// position come from |continuation|, and no compare will be emitted.
HGraphBuilder::IfBuilder::IfBuilder(
    HGraphBuilder* builder,
    HIfContinuation* continuation)
    : builder_(builder),
      position_(RelocInfo::kNoPosition),
      finished_(false),
      deopt_then_(false),
      deopt_else_(false),
      did_then_(false),
      did_else_(false),
      did_and_(false),
      did_or_(false),
      captured_(false),
      needs_compare_(false),
      first_true_block_(NULL),
      first_false_block_(NULL),
      split_edge_merge_block_(NULL),
      merge_block_(NULL) {
  continuation->Continue(&first_true_block_,
                         &first_false_block_,
                         &position_);
}


// Wires |compare| as the current block's terminator. When chaining with
// Or()/And(), an extra split-edge block routes the short-circuit edge into
// the shared split_edge_merge_block_ to keep edges in split form.
void HGraphBuilder::IfBuilder::AddCompare(HControlInstruction* compare) {
  if (split_edge_merge_block_ != NULL) {
    HEnvironment* env = first_false_block_->last_environment();
    HBasicBlock* split_edge =
        builder_->CreateBasicBlock(env->Copy());
    if (did_or_) {
      compare->SetSuccessorAt(0, split_edge);
      compare->SetSuccessorAt(1, first_false_block_);
    } else {
      compare->SetSuccessorAt(0, first_true_block_);
      compare->SetSuccessorAt(1, split_edge);
    }
    split_edge->GotoNoSimulate(split_edge_merge_block_);
  } else {
    compare->SetSuccessorAt(0, first_true_block_);
    compare->SetSuccessorAt(1, first_false_block_);
  }
  builder_->current_block()->Finish(compare);
  needs_compare_ = false;
}


// Begins the right-hand side of a short-circuit OR: true edges of all
// conditions merge into split_edge_merge_block_; evaluation continues in the
// (old) false block. Cannot be mixed with And() in one IfBuilder.
void HGraphBuilder::IfBuilder::Or() {
  ASSERT(!did_and_);
  did_or_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ =
        builder_->CreateBasicBlock(env->Copy());
    first_true_block_->GotoNoSimulate(split_edge_merge_block_);
    first_true_block_ = split_edge_merge_block_;
  }
  builder_->set_current_block(first_false_block_);
  first_false_block_ = builder_->CreateBasicBlock(env->Copy());
}


// Begins the right-hand side of a short-circuit AND: false edges of all
// conditions merge into split_edge_merge_block_; evaluation continues in the
// (old) true block. Cannot be mixed with Or() in one IfBuilder.
void HGraphBuilder::IfBuilder::And() {
  ASSERT(!did_or_);
  did_and_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder_->CreateBasicBlock(env->Copy());
    first_false_block_->GotoNoSimulate(split_edge_merge_block_);
    first_false_block_ = split_edge_merge_block_;
  }
  builder_->set_current_block(first_true_block_);
  first_true_block_ = builder_->CreateBasicBlock(env->Copy());
}


// Saves the current true/false blocks into |continuation| (for a later
// IfBuilder to resume) instead of merging them here, then ends this builder.
void HGraphBuilder::IfBuilder::CaptureContinuation(
    HIfContinuation* continuation) {
  ASSERT(!finished_);
  ASSERT(!captured_);
  HBasicBlock* true_block = last_true_block_ == NULL
      ? first_true_block_
      : last_true_block_;
  HBasicBlock* false_block = did_else_ && (first_false_block_ != NULL)
      ? builder_->current_block()
      : first_false_block_;
  continuation->Capture(true_block, false_block, position_);
  captured_ = true;
  End();
}


// Switches the builder to the "then" branch. If no compare was added, emits
// a constant-false branch so the then-block stays formally reachable.
void HGraphBuilder::IfBuilder::Then() {
  ASSERT(!captured_);
  ASSERT(!finished_);
  did_then_ = true;
  if (needs_compare_) {
    // Handle if's without any expressions, they jump directly to the "else"
    // branch. However, we must pretend that the "then" branch is reachable,
    // so that the graph builder visits it and sees any live range extending
    // constructs within it.
    HConstant* constant_false = builder_->graph()->GetConstantFalse();
    ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
    boolean_type.Add(ToBooleanStub::BOOLEAN);
    HBranch* branch =
        new(zone()) HBranch(constant_false, boolean_type, first_true_block_,
                            first_false_block_);
    builder_->current_block()->Finish(branch);
  }
  builder_->set_current_block(first_true_block_);
}


// Switches the builder from the "then" branch to the "else" branch, noting
// where the then-branch left off.
void HGraphBuilder::IfBuilder::Else() {
  ASSERT(did_then_);
  ASSERT(!captured_);
  ASSERT(!finished_);
  last_true_block_ = builder_->current_block();
  ASSERT(first_true_block_ == NULL || !last_true_block_->IsFinished());
  builder_->set_current_block(first_false_block_);
  did_else_ = true;
}


// Emits an eager deoptimize in the current branch and records which branch
// deopts so End() can pad environments accordingly.
void HGraphBuilder::IfBuilder::Deopt(const char* reason) {
  ASSERT(did_then_);
  if (did_else_) {
    deopt_else_ = true;
  } else {
    deopt_then_ = true;
  }
  builder_->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
}


// Ends the current branch with a return of |value|; the branch's entry block
// is cleared so End() knows that side does not reach the merge.
void HGraphBuilder::IfBuilder::Return(HValue* value) {
  HBasicBlock* block = builder_->current_block();
  HValue* parameter_count = builder_->graph()->GetConstantMinus1();
  block->FinishExit(builder_->New<HReturn>(value, parameter_count));
  builder_->set_current_block(NULL);
  if (did_else_) {
    first_false_block_ = NULL;
  } else {
    first_true_block_ = NULL;
  }
}


// Closes the diamond: joins surviving branches in a fresh merge block (with
// environment padding for a deopting branch) and makes it the current block.
void HGraphBuilder::IfBuilder::End() {
  if (!captured_) {
    ASSERT(did_then_);
    if (!did_else_) {
      last_true_block_ = builder_->current_block();
    }
    if (first_true_block_ == NULL) {
      // Return on true. Nothing to do, just continue the false block.
    } else if (first_false_block_ == NULL) {
      // Deopt on false. Nothing to do except switching to the true block.
      builder_->set_current_block(last_true_block_);
    } else {
      merge_block_ = builder_->graph()->CreateBasicBlock();
      ASSERT(!finished_);
      if (!did_else_) Else();
      ASSERT(!last_true_block_->IsFinished());
      HBasicBlock* last_false_block = builder_->current_block();
      ASSERT(!last_false_block->IsFinished());
      if (deopt_then_) {
        last_false_block->GotoNoSimulate(merge_block_);
        builder_->PadEnvironmentForContinuation(last_true_block_,
                                                merge_block_);
        last_true_block_->GotoNoSimulate(merge_block_);
      } else {
        last_true_block_->GotoNoSimulate(merge_block_);
        if (deopt_else_) {
          builder_->PadEnvironmentForContinuation(last_false_block,
                                                  merge_block_);
        }
        last_false_block->GotoNoSimulate(merge_block_);
      }
      builder_->set_current_block(merge_block_);
    }
  }
  finished_ = true;
}


// Prepares a counted-loop skeleton; the header block is created eagerly,
// body/exit blocks are created in BeginBody().
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
                                        HValue* context,
                                        LoopBuilder::Direction direction)
    : builder_(builder),
      context_(context),
      direction_(direction),
      finished_(false) {
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
}


// Emits the loop header: an induction phi starting at |initial|, a numeric
// compare against |terminating| using |token|, and body/exit blocks. Returns
// the value the body should use as the induction variable (pre-incremented
// or -decremented for kPre* directions, the phi itself otherwise).
HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  env->Push(initial);
  builder_->current_block()->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);
  // Remove the phi from the expression stack
  body_env->Pop();

  builder_->set_current_block(header_block_);
  HCompareNumericAndBranch* compare =
      new(zone()) HCompareNumericAndBranch(phi_, terminating, token);
  compare->SetSuccessorAt(0, body_block_);
  compare->SetSuccessorAt(1, exit_block_);
  builder_->current_block()->Finish(compare);

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, one);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}


// Closes the loop: emits the post-increment/-decrement if needed, wires the
// back edge to the header, and leaves the builder in the exit block.
void HGraphBuilder::LoopBuilder::EndBody() {
  ASSERT(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPostIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, one);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  // Push the new increment value on the expression stack to merge into the phi.
  builder_->environment()->Push(increment_);
  HBasicBlock* last_block = builder_->current_block();
  last_block->GotoNoSimulate(header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  builder_->set_current_block(exit_block_);
  // Pop the phi from the expression stack
  builder_->environment()->Pop();
  finished_ = true;
}


// Entry point: builds the Hydrogen graph for info_. Returns NULL if graph
// building bails out.
HGraph* HGraphBuilder::CreateGraph() {
  graph_ = new(zone()) HGraph(info_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueValueIds();
  return graph_;
}


// Appends |instr| to the current block; inside a no-side-effects scope the
// instruction is marked as having no observable side effects.
HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  ASSERT(current_block() != NULL);
  current_block()->AddInstruction(instr);
  if (graph()->IsInsideNoSideEffectsScope()) {
    instr->SetFlag(HValue::kHasNoObservableSideEffects);
  }
  return instr;
}


// Emits a load/add/store sequence that bumps |counter| when native code
// counters are enabled; a no-op otherwise.
void HGraphBuilder::AddIncrementCounter(StatsCounter* counter,
                                        HValue* context) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    HValue* reference = Add<HConstant>(ExternalReference(counter));
    HValue* old_value = Add<HLoadNamedField>(reference,
                                             HObjectAccess::ForCounter());
    HValue* new_value = Add<HAdd>(old_value, graph()->GetConstant1());
    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
                          new_value);
  }
}


// Adds a simulate at |id| to the current block. Disallowed inside a
// no-side-effects scope (there is nothing observable to simulate).
void HGraphBuilder::AddSimulate(BailoutId id,
                                RemovableSimulate removable) {
  ASSERT(current_block() != NULL);
  ASSERT(!graph()->IsInsideNoSideEffectsScope());
  current_block()->AddNewSimulate(id, removable);
}


// Creates a new block seeded with |env| as its initial environment.
HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
  HBasicBlock* b = graph()->CreateBasicBlock();
  b->SetInitialEnvironment(env);
  return b;
}


// Creates a loop-header block whose environment is the current one copied
// in loop-header form (values routed through phis).
HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
  HBasicBlock* header = graph()->CreateBasicBlock();
  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
  header->SetInitialEnvironment(entry_env);
  header->AttachLoopInformation();
  return header;
}


// Returns |obj| unchanged if it is statically known to be a heap object;
// otherwise emits a runtime heap-object check.
HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
  if (obj->type().IsHeapObject()) return obj;
  return Add<HCheckHeapObject>(obj);
}


// Ends the current block with an eager deoptimize and a jump to
// |continuation|, padding the environment first so the join is well-formed.
void HGraphBuilder::FinishExitWithHardDeoptimization(
    const char* reason, HBasicBlock* continuation) {
  PadEnvironmentForContinuation(current_block(), continuation);
  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  if (graph()->IsInsideNoSideEffectsScope()) {
    current_block()->GotoNoSimulate(continuation);
  } else {
    current_block()->Goto(continuation);
  }
}


void HGraphBuilder::PadEnvironmentForContinuation(
    HBasicBlock* from,
    HBasicBlock* continuation) {
  if (continuation->last_environment() != NULL) {
    // When merging from a deopt block to a continuation, resolve differences in
    // environment by pushing constant 0 and popping extra values so that the
    // environments match during the join. Push 0 since it has the most specific
    // representation, and will not influence representation inference of the
    // phi.
    int continuation_env_length = continuation->last_environment()->length();
    while (continuation_env_length != from->last_environment()->length()) {
      if (continuation_env_length > from->last_environment()->length()) {
        from->last_environment()->Push(graph()->GetConstant0());
      } else {
        from->last_environment()->Pop();
      }
    }
  } else {
    ASSERT(continuation->predecessors()->length() == 0);
  }
}


// Emits a map check of |obj| against the single expected |map|.
HValue* HGraphBuilder::BuildCheckMap(HValue* obj, Handle<Map> map) {
  return Add<HCheckMaps>(obj, map, top_info());
}


// Wraps |object| with an HWrapReceiver unless it is already known to be a
// JSObject.
HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
  if (object->type().IsJSObject()) return object;
  return Add<HWrapReceiver>(object, function);
}


// Emits the out-of-line path for a keyed store that may have to grow the
// backing store: keys past the current length get a capacity check and, if
// needed, a grown elements array (deopting when the key would exceed the
// current capacity plus JSObject::kMaxGap); in-bounds keys only get a bounds
// check.  Returns the (possibly new) elements backing store.
HValue* HGraphBuilder::BuildCheckForCapacityGrow(HValue* object,
                                                 HValue* elements,
                                                 ElementsKind kind,
                                                 HValue* length,
                                                 HValue* key,
                                                 bool is_js_array) {
  Zone* zone = this->zone();
  IfBuilder length_checker(this);

  // For holey kinds any key >= length may grow; for packed kinds only a
  // store exactly at the current length can.
  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
  length_checker.If<HCompareNumericAndBranch>(key, length, token);

  length_checker.Then();

  HValue* current_capacity = AddLoadFixedArrayLength(elements);

  IfBuilder capacity_checker(this);

  capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
                                                Token::GTE);
  capacity_checker.Then();

  HValue* context = environment()->context();

  // Refuse to grow past current capacity + kMaxGap; such stores deopt
  // instead of creating huge sparse backing stores.
  HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
  HValue* max_capacity = Add<HAdd>(current_capacity, max_gap);
  IfBuilder key_checker(this);
  key_checker.If<HCompareNumericAndBranch>(key, max_capacity, Token::LT);
  key_checker.Then();
  key_checker.ElseDeopt("Key out of capacity range");
  key_checker.End();

  HValue* new_capacity = BuildNewElementsCapacity(key);
  HValue* new_elements = BuildGrowElementsCapacity(object, elements,
                                                   kind, kind, length,
                                                   new_capacity);

  environment()->Push(new_elements);
  capacity_checker.Else();

  environment()->Push(elements);
  capacity_checker.End();

  if (is_js_array) {
    // Growing a JSArray also bumps its length field to key + 1.
    HValue* new_length = AddInstruction(
        HAdd::New(zone, context, key, graph_->GetConstant1()));
    new_length->ClearFlag(HValue::kCanOverflow);

    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
                          new_length);
  }

  length_checker.Else();
  Add<HBoundsCheck>(key, length);

  environment()->Push(elements);
  length_checker.End();

  return environment()->Pop();
}


// Copies the elements backing store before a write if it is copy-on-write
// (i.e. has the fixed COW array map).  Returns the writable elements array.
HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
                                                HValue* elements,
                                                ElementsKind kind,
                                                HValue* length) {
  Factory* factory = isolate()->factory();

  IfBuilder cow_checker(this);

  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  cow_checker.Then();

  HValue* capacity = AddLoadFixedArrayLength(elements);

  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
                                                   kind, length, capacity);

  environment()->Push(new_elements);

  cow_checker.Else();

  environment()->Push(elements);

  cow_checker.End();

  return environment()->Pop();
}


// Emits an in-place elements-kind transition for |object|: traps on
// allocation mementos when the transition is tracked, re-allocates and
// copies the backing store when the map change alone is not sufficient, and
// finally installs the new |map|.
void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
                                                HValue* map,
                                                ElementsKind from_kind,
                                                ElementsKind to_kind,
                                                bool is_jsarray) {
  ASSERT(!IsFastHoleyElementsKind(from_kind) ||
         IsFastHoleyElementsKind(to_kind));

  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    Add<HTrapAllocationMemento>(object);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    HInstruction* elements = AddLoadElements(object, NULL);

    HInstruction* empty_fixed_array = Add<HConstant>(
        isolate()->factory()->empty_fixed_array());

    IfBuilder if_builder(this);

    // Nothing to copy when the object still has the empty fixed array.
    if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);

    if_builder.Then();

    HInstruction* elements_length = AddLoadFixedArrayLength(elements);

    HInstruction* array_length = is_jsarray
        ?
        Add<HLoadNamedField>(object, HObjectAccess::ForArrayLength(from_kind))
        : elements_length;

    BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
                              array_length, elements_length);

    if_builder.End();
  }

  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
}


// Emits the body of a monomorphic keyed load/store whose receiver map has
// already been checked by |mapcheck|.  Handles external (typed) arrays,
// growing stores, copy-on-write backing stores and smi-checking values
// stored into smi arrays.
HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
    HValue* object,
    HValue* key,
    HValue* val,
    HCheckMaps* mapcheck,
    bool is_js_array,
    ElementsKind elements_kind,
    bool is_store,
    LoadKeyedHoleMode load_mode,
    KeyedAccessStoreMode store_mode) {
  ASSERT(!IsExternalArrayElementsKind(elements_kind) || !is_js_array);
  // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
  // on a HElementsTransition instruction. The flag can also be removed if the
  // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
  // ElementsKind transitions. Finally, the dependency can be removed for stores
  // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
  // generated store code.
  if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
      (elements_kind == FAST_ELEMENTS && is_store)) {
    if (mapcheck != NULL) {
      mapcheck->ClearGVNFlag(kDependsOnElementsKind);
    }
  }
  bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
  bool fast_elements = IsFastObjectElementsKind(elements_kind);
  HValue* elements = AddLoadElements(object, mapcheck);
  // Fast-case stores that are not prepared to handle COW arrays check here
  // that the backing store has the plain FixedArray map.
  if (is_store && (fast_elements || fast_smi_only_elements) &&
      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
    HCheckMaps* check_cow_map = Add<HCheckMaps>(
        elements, isolate()->factory()->fixed_array_map(), top_info());
    check_cow_map->ClearGVNFlag(kDependsOnElementsKind);
  }
  HInstruction* length = NULL;
  if (is_js_array) {
    length = Add<HLoadNamedField>(object,
        HObjectAccess::ForArrayLength(elements_kind), mapcheck);
  } else {
    length = AddLoadFixedArrayLength(elements);
  }
  length->set_type(HType::Smi());
  HValue* checked_key = NULL;
  if (IsExternalArrayElementsKind(elements_kind)) {
    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      // Out-of-range accesses are ignored: the access is only performed
      // when 0 <= key < length; negative keys deopt.
      NoObservableSideEffectsScope no_effects(this);
      HLoadExternalArrayPointer* external_elements =
          Add<HLoadExternalArrayPointer>(elements);
      IfBuilder length_checker(this);
      length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
      length_checker.Then();
      IfBuilder negative_checker(this);
      HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
          key, graph()->GetConstant0(), Token::GTE);
      negative_checker.Then();
      HInstruction* result = AddExternalArrayElementAccess(
          external_elements, key, val, bounds_check, elements_kind, is_store);
      negative_checker.ElseDeopt("Negative key encountered");
      length_checker.End();
      return result;
    } else {
      ASSERT(store_mode == STANDARD_STORE);
      checked_key = Add<HBoundsCheck>(key, length);
      HLoadExternalArrayPointer* external_elements =
          Add<HLoadExternalArrayPointer>(elements);
      return AddExternalArrayElementAccess(
          external_elements, checked_key, val,
          mapcheck, elements_kind, is_store);
    }
  }
  ASSERT(fast_smi_only_elements ||
         fast_elements ||
         IsFastDoubleElementsKind(elements_kind));

  // In case val is stored into a fast smi array, assure that the value is a smi
  // before manipulating the backing store. Otherwise the actual store may
  // deopt, leaving the backing store in an invalid state.
  if (is_store && IsFastSmiElementsKind(elements_kind) &&
      !val->type().IsSmi()) {
    val = Add<HForceRepresentation>(val, Representation::Smi());
  }

  if (IsGrowStoreMode(store_mode)) {
    NoObservableSideEffectsScope no_effects(this);
    elements = BuildCheckForCapacityGrow(object, elements, elements_kind,
                                         length, key, is_js_array);
    checked_key = key;
  } else {
    checked_key = Add<HBoundsCheck>(key, length);

    if (is_store && (fast_elements || fast_smi_only_elements)) {
      if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
        NoObservableSideEffectsScope no_effects(this);

        elements = BuildCopyElementsOnWrite(object, elements, elements_kind,
                                            length);
      } else {
        HCheckMaps* check_cow_map = Add<HCheckMaps>(
            elements, isolate()->factory()->fixed_array_map(),
            top_info());
        check_cow_map->ClearGVNFlag(kDependsOnElementsKind);
      }
    }
  }
  return AddFastElementAccess(elements, checked_key, val, mapcheck,
                              elements_kind, is_store, load_mode, store_mode);
}


// Allocates a backing store (FixedArray or FixedDoubleArray) big enough for
// |capacity| elements.  The header (map and length) is NOT initialized here;
// see BuildInitializeElementsHeader.
HValue* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
                                             HValue* capacity) {
  int elements_size;
  InstanceType instance_type;

  if (IsFastDoubleElementsKind(kind)) {
    elements_size = kDoubleSize;
    instance_type = FIXED_DOUBLE_ARRAY_TYPE;
  } else {
    elements_size = kPointerSize;
    instance_type = FIXED_ARRAY_TYPE;
1339 } 1340 1341 HConstant* elements_size_value = Add<HConstant>(elements_size); 1342 HValue* mul = Add<HMul>(capacity, elements_size_value); 1343 mul->ClearFlag(HValue::kCanOverflow); 1344 1345 HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize); 1346 HValue* total_size = Add<HAdd>(mul, header_size); 1347 total_size->ClearFlag(HValue::kCanOverflow); 1348 1349 return Add<HAllocate>(total_size, HType::JSArray(), 1350 isolate()->heap()->GetPretenureMode(), instance_type); 1351 } 1352 1353 1354 void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements, 1355 ElementsKind kind, 1356 HValue* capacity) { 1357 Factory* factory = isolate()->factory(); 1358 Handle<Map> map = IsFastDoubleElementsKind(kind) 1359 ? factory->fixed_double_array_map() 1360 : factory->fixed_array_map(); 1361 1362 AddStoreMapConstant(elements, map); 1363 Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(), 1364 capacity); 1365 } 1366 1367 1368 HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader( 1369 ElementsKind kind, 1370 HValue* capacity) { 1371 // The HForceRepresentation is to prevent possible deopt on int-smi 1372 // conversion after allocation but before the new object fields are set. 
1373 capacity = Add<HForceRepresentation>(capacity, Representation::Smi()); 1374 HValue* new_elements = BuildAllocateElements(kind, capacity); 1375 BuildInitializeElementsHeader(new_elements, kind, capacity); 1376 return new_elements; 1377 } 1378 1379 1380 HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array, 1381 HValue* array_map, 1382 AllocationSiteMode mode, 1383 ElementsKind elements_kind, 1384 HValue* allocation_site_payload, 1385 HValue* length_field) { 1386 1387 Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map); 1388 1389 HConstant* empty_fixed_array = 1390 Add<HConstant>(isolate()->factory()->empty_fixed_array()); 1391 1392 HObjectAccess access = HObjectAccess::ForPropertiesPointer(); 1393 Add<HStoreNamedField>(array, access, empty_fixed_array); 1394 Add<HStoreNamedField>(array, HObjectAccess::ForArrayLength(elements_kind), 1395 length_field); 1396 1397 if (mode == TRACK_ALLOCATION_SITE) { 1398 BuildCreateAllocationMemento(array, 1399 JSArray::kSize, 1400 allocation_site_payload); 1401 } 1402 1403 int elements_location = JSArray::kSize; 1404 if (mode == TRACK_ALLOCATION_SITE) { 1405 elements_location += AllocationMemento::kSize; 1406 } 1407 1408 HValue* elements = Add<HInnerAllocatedObject>(array, elements_location); 1409 Add<HStoreNamedField>(array, HObjectAccess::ForElementsPointer(), elements); 1410 return static_cast<HInnerAllocatedObject*>(elements); 1411 } 1412 1413 1414 HInstruction* HGraphBuilder::AddExternalArrayElementAccess( 1415 HValue* external_elements, 1416 HValue* checked_key, 1417 HValue* val, 1418 HValue* dependency, 1419 ElementsKind elements_kind, 1420 bool is_store) { 1421 if (is_store) { 1422 ASSERT(val != NULL); 1423 switch (elements_kind) { 1424 case EXTERNAL_PIXEL_ELEMENTS: { 1425 val = Add<HClampToUint8>(val); 1426 break; 1427 } 1428 case EXTERNAL_BYTE_ELEMENTS: 1429 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 1430 case EXTERNAL_SHORT_ELEMENTS: 1431 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: 1432 case 
EXTERNAL_INT_ELEMENTS: 1433 case EXTERNAL_UNSIGNED_INT_ELEMENTS: { 1434 break; 1435 } 1436 case EXTERNAL_FLOAT_ELEMENTS: 1437 case EXTERNAL_DOUBLE_ELEMENTS: 1438 break; 1439 case FAST_SMI_ELEMENTS: 1440 case FAST_ELEMENTS: 1441 case FAST_DOUBLE_ELEMENTS: 1442 case FAST_HOLEY_SMI_ELEMENTS: 1443 case FAST_HOLEY_ELEMENTS: 1444 case FAST_HOLEY_DOUBLE_ELEMENTS: 1445 case DICTIONARY_ELEMENTS: 1446 case NON_STRICT_ARGUMENTS_ELEMENTS: 1447 UNREACHABLE(); 1448 break; 1449 } 1450 return Add<HStoreKeyed>(external_elements, checked_key, val, elements_kind); 1451 } else { 1452 ASSERT(val == NULL); 1453 HLoadKeyed* load = Add<HLoadKeyed>(external_elements, 1454 checked_key, 1455 dependency, 1456 elements_kind); 1457 if (FLAG_opt_safe_uint32_operations && 1458 elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) { 1459 graph()->RecordUint32Instruction(load); 1460 } 1461 return load; 1462 } 1463 } 1464 1465 1466 HInstruction* HGraphBuilder::AddFastElementAccess( 1467 HValue* elements, 1468 HValue* checked_key, 1469 HValue* val, 1470 HValue* load_dependency, 1471 ElementsKind elements_kind, 1472 bool is_store, 1473 LoadKeyedHoleMode load_mode, 1474 KeyedAccessStoreMode store_mode) { 1475 if (is_store) { 1476 ASSERT(val != NULL); 1477 switch (elements_kind) { 1478 case FAST_SMI_ELEMENTS: 1479 case FAST_HOLEY_SMI_ELEMENTS: 1480 case FAST_ELEMENTS: 1481 case FAST_HOLEY_ELEMENTS: 1482 case FAST_DOUBLE_ELEMENTS: 1483 case FAST_HOLEY_DOUBLE_ELEMENTS: 1484 return Add<HStoreKeyed>(elements, checked_key, val, elements_kind); 1485 default: 1486 UNREACHABLE(); 1487 return NULL; 1488 } 1489 } 1490 // It's an element load (!is_store). 
  return Add<HLoadKeyed>(
      elements, checked_key, load_dependency, elements_kind, load_mode);
}


// Loads the elements pointer of a JSObject.  |typecheck| is an optional
// instruction the load depends on (e.g. a preceding map check).
HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
                                                HValue* typecheck) {
  return Add<HLoadNamedField>(object,
                              HObjectAccess::ForElementsPointer(),
                              typecheck);
}


// Loads the length field of a FixedArray(-like) backing store.
HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
  return Add<HLoadNamedField>(object,
                              HObjectAccess::ForFixedArrayLength());
}


// Computes the grown capacity for a backing store:
// new_capacity = old_capacity + old_capacity / 2 + 16.
HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
  HValue* half_old_capacity = Add<HShr>(old_capacity, graph_->GetConstant1());

  HValue* new_capacity = Add<HAdd>(half_old_capacity, old_capacity);
  new_capacity->ClearFlag(HValue::kCanOverflow);

  HValue* min_growth = Add<HConstant>(16);

  new_capacity = Add<HAdd>(new_capacity, min_growth);
  new_capacity->ClearFlag(HValue::kCanOverflow);

  return new_capacity;
}


// Emits a bounds check guaranteeing that an array with |length| elements of
// |kind| fits into a regular new-space allocation.
void HGraphBuilder::BuildNewSpaceArrayCheck(HValue* length, ElementsKind kind) {
  Heap* heap = isolate()->heap();
  int element_size = IsFastDoubleElementsKind(kind) ? kDoubleSize
                                                    : kPointerSize;
  int max_size = heap->MaxRegularSpaceAllocationSize() / element_size;
  max_size -= JSArray::kSize / element_size;
  HConstant* max_size_constant = Add<HConstant>(max_size);
  Add<HBoundsCheck>(length, max_size_constant);
}


// Allocates a backing store with |new_capacity| slots, copies the first
// |length| elements over, and installs it as |object|'s elements.  Returns
// the new backing store.
HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
                                                 HValue* elements,
                                                 ElementsKind kind,
                                                 ElementsKind new_kind,
                                                 HValue* length,
                                                 HValue* new_capacity) {
  BuildNewSpaceArrayCheck(new_capacity, new_kind);

  HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
      new_kind, new_capacity);

  BuildCopyElements(elements, kind,
                    new_elements, new_kind,
                    length, new_capacity);

  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                        new_elements);

  return new_elements;
}


// Stores the hole value into elements [from, to), unrolling the loop for the
// small constant case 0..JSArray::kPreallocatedArrayElements.
void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
                                              ElementsKind elements_kind,
                                              HValue* from,
                                              HValue* to) {
  // Fast elements kinds need to be initialized in case statements below cause
  // a garbage collection.
  Factory* factory = isolate()->factory();

  // Double kinds use the hole NaN bit pattern; other kinds the hole object.
  double nan_double = FixedDoubleArray::hole_nan_as_double();
  HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
      ? Add<HConstant>(factory->the_hole_value())
      : Add<HConstant>(nan_double);

  // Special loop unfolding case
  static const int kLoopUnfoldLimit = 4;
  bool unfold_loop = false;
  int initial_capacity = JSArray::kPreallocatedArrayElements;
  if (from->IsConstant() && to->IsConstant() &&
      initial_capacity <= kLoopUnfoldLimit) {
    HConstant* constant_from = HConstant::cast(from);
    HConstant* constant_to = HConstant::cast(to);

    if (constant_from->HasInteger32Value() &&
        constant_from->Integer32Value() == 0 &&
        constant_to->HasInteger32Value() &&
        constant_to->Integer32Value() == initial_capacity) {
      unfold_loop = true;
    }
  }

  // Since we're about to store a hole value, the store instruction below must
  // assume an elements kind that supports heap object values.
  if (IsFastSmiOrObjectElementsKind(elements_kind)) {
    elements_kind = FAST_HOLEY_ELEMENTS;
  }

  if (unfold_loop) {
    for (int i = 0; i < initial_capacity; i++) {
      HInstruction* key = Add<HConstant>(i);
      Add<HStoreKeyed>(elements, key, hole, elements_kind);
    }
  } else {
    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);

    HValue* key = builder.BeginBody(from, to, Token::LT);

    Add<HStoreKeyed>(elements, key, hole, elements_kind);

    builder.EndBody();
  }
}


// Copies |length| elements from one backing store to another, converting
// hole representations as needed when source and destination kinds differ,
// and hole-filling any unused trailing capacity.
void HGraphBuilder::BuildCopyElements(HValue* from_elements,
                                      ElementsKind from_elements_kind,
                                      HValue* to_elements,
                                      ElementsKind to_elements_kind,
                                      HValue* length,
                                      HValue* capacity) {
  bool pre_fill_with_holes =
      IsFastDoubleElementsKind(from_elements_kind) &&
      IsFastObjectElementsKind(to_elements_kind);

  if (pre_fill_with_holes) {
    // If the copy might trigger a GC, make sure that the FixedArray is
    // pre-initialized with holes to make sure that it's always in a consistent
    // state.
    BuildFillElementsWithHole(to_elements, to_elements_kind,
                              graph()->GetConstant0(), capacity);
  }

  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);

  HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);

  HValue* element = Add<HLoadKeyed>(from_elements, key,
                                    static_cast<HValue*>(NULL),
                                    from_elements_kind,
                                    ALLOW_RETURN_HOLE);

  // When copying from a holey kind into a smi kind, store with
  // FAST_HOLEY_ELEMENTS so that hole values can be written.
  ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
                       IsFastSmiElementsKind(to_elements_kind))
      ? FAST_HOLEY_ELEMENTS : to_elements_kind;

  if (IsHoleyElementsKind(from_elements_kind) &&
      from_elements_kind != to_elements_kind) {
    // The source may contain holes that must be rewritten into the
    // destination kind's hole representation.
    IfBuilder if_hole(this);
    if_hole.If<HCompareHoleAndBranch>(element);
    if_hole.Then();
    HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
        ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
        : graph()->GetConstantHole();
    Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
    if_hole.Else();
    HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
    store->SetFlag(HValue::kAllowUndefinedAsNaN);
    if_hole.End();
  } else {
    HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
    store->SetFlag(HValue::kAllowUndefinedAsNaN);
  }

  builder.EndBody();

  if (!pre_fill_with_holes && length != capacity) {
    // Fill unused capacity with the hole.
    BuildFillElementsWithHole(to_elements, to_elements_kind,
                              key, capacity);
  }
}


// Clones a boilerplate JSArray (and, when |length| > 0, its elements) with a
// single allocation, optionally appending an allocation memento.
HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
                                              HValue* allocation_site,
                                              AllocationSiteMode mode,
                                              ElementsKind kind,
                                              int length) {
  NoObservableSideEffectsScope no_effects(this);

  // All sizes here are multiples of kPointerSize.
  int size = JSArray::kSize;
  if (mode == TRACK_ALLOCATION_SITE) {
    size += AllocationMemento::kSize;
  }
  int elems_offset = size;
  InstanceType instance_type = IsFastDoubleElementsKind(kind) ?
      FIXED_DOUBLE_ARRAY_TYPE : FIXED_ARRAY_TYPE;
  if (length > 0) {
    size += IsFastDoubleElementsKind(kind)
        ? FixedDoubleArray::SizeFor(length)
        : FixedArray::SizeFor(length);
  }

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  HValue* size_in_bytes = Add<HConstant>(size);
  HInstruction* object = Add<HAllocate>(size_in_bytes,
                                        HType::JSObject(),
                                        NOT_TENURED,
                                        instance_type);

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length == 0)) {
      HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
      Add<HStoreNamedField>(object, access,
                            Add<HLoadNamedField>(boilerplate, access));
    }
  }

  // Create an allocation site info if requested.
  if (mode == TRACK_ALLOCATION_SITE) {
    BuildCreateAllocationMemento(object, JSArray::kSize, allocation_site);
  }

  if (length > 0) {
    // Get hold of the elements array of the boilerplate and setup the
    // elements pointer in the resulting object.
    HValue* boilerplate_elements = AddLoadElements(boilerplate, NULL);
    HValue* object_elements = Add<HInnerAllocatedObject>(object, elems_offset);
    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                          object_elements);

    // Copy the elements array header.
    for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
      HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
      Add<HStoreNamedField>(object_elements, access,
                            Add<HLoadNamedField>(boilerplate_elements, access));
    }

    // Copy the elements array contents.
    // TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold
    // copying loops with constant length up to a given boundary and use this
    // helper here instead.
    for (int i = 0; i < length; i++) {
      HValue* key_constant = Add<HConstant>(i);
      HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant,
                                            static_cast<HValue*>(NULL), kind);
      Add<HStoreKeyed>(object_elements, key_constant, value, kind);
    }
  }

  return object;
}


// Emits a null/undefined/undetectable comparison of |value| against |type|,
// capturing the resulting true/false blocks in |continuation|.
void HGraphBuilder::BuildCompareNil(
    HValue* value,
    Handle<Type> type,
    int position,
    HIfContinuation* continuation) {
  IfBuilder if_nil(this, position);
  bool needs_or = false;
  if (type->Maybe(Type::Null())) {
    if (needs_or) if_nil.Or();
    if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
    needs_or = true;
  }
  if (type->Maybe(Type::Undefined())) {
    if (needs_or) if_nil.Or();
    if_nil.If<HCompareObjectEqAndBranch>(value,
                                         graph()->GetConstantUndefined());
    needs_or = true;
  }
  if (type->Maybe(Type::Undetectable())) {
    if (needs_or) if_nil.Or();
    if_nil.If<HIsUndetectableAndBranch>(value);
  } else {
    if_nil.Then();
    if_nil.Else();
    if (type->NumClasses() == 1) {
      BuildCheckHeapObject(value);
      // For ICs, the map checked below is a sentinel map that gets replaced by
      // the monomorphic map when the code is used as a template to generate a
      // new IC. For optimized functions, there is no sentinel map, the map
      // emitted below is the actual monomorphic map.
      BuildCheckMap(value, type->Classes().Current());
    } else {
      if_nil.Deopt("Too many undetectable types");
    }
  }

  if_nil.CaptureContinuation(continuation);
}


// Allocates an AllocationMemento directly after |previous_object| (which is
// |previous_object_size| bytes long, inside the same allocation) and points
// it at |alloc_site|.
HValue* HGraphBuilder::BuildCreateAllocationMemento(HValue* previous_object,
                                                    int previous_object_size,
                                                    HValue* alloc_site) {
  ASSERT(alloc_site != NULL);
  HInnerAllocatedObject* alloc_memento = Add<HInnerAllocatedObject>(
      previous_object, previous_object_size);
  Handle<Map> alloc_memento_map(
      isolate()->heap()->allocation_memento_map());
  AddStoreMapConstant(alloc_memento, alloc_memento_map);
  HObjectAccess access = HObjectAccess::ForAllocationMementoSite();
  Add<HStoreNamedField>(alloc_memento, access, alloc_site);
  return alloc_memento;
}


HInstruction* HGraphBuilder::BuildGetNativeContext() {
  // Get the global context, then the native context
  HInstruction* global_object = Add<HGlobalObject>();
  HObjectAccess access = HObjectAccess::ForJSObjectOffset(
      GlobalObject::kNativeContextOffset);
  return Add<HLoadNamedField>(global_object, access);
}


// Loads the Array function out of the native context.
HInstruction* HGraphBuilder::BuildGetArrayFunction() {
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* index =
      Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
  return Add<HLoadKeyed>(
      native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
}


// JSArrayBuilder that tracks allocation sites for |kind| unless explicitly
// disabled via |override_mode|.
HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
    ElementsKind kind,
    HValue* allocation_site_payload,
    HValue* constructor_function,
    AllocationSiteOverrideMode override_mode) :
        builder_(builder),
        kind_(kind),
        allocation_site_payload_(allocation_site_payload),
        constructor_function_(constructor_function) {
  mode_ = override_mode == DISABLE_ALLOCATION_SITES
      ? DONT_TRACK_ALLOCATION_SITE
      : AllocationSite::GetMode(kind);
}


// JSArrayBuilder variant that never tracks allocation sites.
HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
                                              ElementsKind kind,
                                              HValue* constructor_function) :
    builder_(builder),
    kind_(kind),
    mode_(DONT_TRACK_ALLOCATION_SITE),
    allocation_site_payload_(NULL),
    constructor_function_(constructor_function) {
}


// Emits code that loads the JSArray map for kind_: straight from the
// constructor function's initial map when kind_ is the initial fast kind,
// otherwise from the native context's JS_ARRAY_MAPS list.
HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
  if (kind_ == GetInitialFastElementsKind()) {
    // No need for a context lookup if the kind_ matches the initial
    // map, because we can just load the map in that case.
    HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
    return builder()->AddInstruction(
        builder()->BuildLoadNamedField(constructor_function_, access, NULL));
  }

  HInstruction* native_context = builder()->BuildGetNativeContext();
  HInstruction* index = builder()->Add<HConstant>(
      static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX));

  HInstruction* map_array = builder()->Add<HLoadKeyed>(
      native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);

  HInstruction* kind_index = builder()->Add<HConstant>(kind_);

  return builder()->Add<HLoadKeyed>(
      map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
}


HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
  // Find the map near the constructor function
  HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
  return builder()->AddInstruction(
      builder()->BuildLoadNamedField(constructor_function_, access, NULL));
}


// Computes the total allocation size in bytes for an array with
// |length_node| elements: JSArray header (+ optional memento) + elements
// header + length * element size.
HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize(
    HValue* length_node) {
  ASSERT(length_node != NULL);

  int base_size = JSArray::kSize;
  if (mode_ == TRACK_ALLOCATION_SITE) {
    base_size += AllocationMemento::kSize;
  }

  STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
  base_size += FixedArray::kHeaderSize;

  HInstruction* elements_size_value =
      builder()->Add<HConstant>(elements_size());
  HInstruction* mul = builder()->Add<HMul>(length_node, elements_size_value);
  mul->ClearFlag(HValue::kCanOverflow);

  HInstruction* base = builder()->Add<HConstant>(base_size);
  HInstruction* total_size = builder()->Add<HAdd>(base, mul);
  total_size->ClearFlag(HValue::kCanOverflow);
  return total_size;
}


// Like EstablishAllocationSize, but for an empty array with the default
// initial capacity; the result is a compile-time constant.
HValue* HGraphBuilder::JSArrayBuilder::EstablishEmptyArrayAllocationSize() {
  int base_size = JSArray::kSize;
  if (mode_ == TRACK_ALLOCATION_SITE) {
    base_size += AllocationMemento::kSize;
  }

  base_size += IsFastDoubleElementsKind(kind_)
      ? FixedDoubleArray::SizeFor(initial_capacity())
      : FixedArray::SizeFor(initial_capacity());

  return builder()->Add<HConstant>(base_size);
}


// Allocates an array with the default initial capacity, length 0, and
// hole-filled elements.
HValue* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
  HValue* size_in_bytes = EstablishEmptyArrayAllocationSize();
  HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
  return AllocateArray(size_in_bytes,
                       capacity,
                       builder()->graph()->GetConstant0(),
                       true);
}


// Allocates an array with room for |capacity| elements and the given length.
HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* capacity,
                                                     HValue* length_field,
                                                     bool fill_with_hole) {
  HValue* size_in_bytes = EstablishAllocationSize(capacity);
  return AllocateArray(size_in_bytes, capacity, length_field, fill_with_hole);
}


HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
                                                     HValue* capacity,
                                                     HValue* length_field,
                                                     bool fill_with_hole) {
  // These HForceRepresentations are because we store these as fields in the
  // objects we construct, and an int32-to-smi HChange could deopt.
  // Accept the deopt possibility now, before allocation occurs.
  capacity = builder()->Add<HForceRepresentation>(capacity,
                                                  Representation::Smi());
  length_field = builder()->Add<HForceRepresentation>(length_field,
                                                      Representation::Smi());
  // Allocate (dealing with failure appropriately)
  HAllocate* new_object = builder()->Add<HAllocate>(size_in_bytes,
      HType::JSArray(), NOT_TENURED, JS_ARRAY_TYPE);

  // Fill in the fields: map, properties, length
  HValue* map;
  if (allocation_site_payload_ == NULL) {
    map = EmitInternalMapCode();
  } else {
    map = EmitMapCode();
  }
  elements_location_ = builder()->BuildJSArrayHeader(new_object,
                                                     map,
                                                     mode_,
                                                     kind_,
                                                     allocation_site_payload_,
                                                     length_field);

  // Initialize the elements
  builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);

  if (fill_with_hole) {
    builder()->BuildFillElementsWithHole(elements_location_, kind_,
                                         graph()->GetConstant0(), capacity);
  }

  return new_object;
}


// Stores a constant |map| into |object|'s map slot.
HStoreNamedField* HGraphBuilder::AddStoreMapConstant(HValue *object,
                                                     Handle<Map> map) {
  return Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
                               Add<HConstant>(map));
}


// Loads a JS builtin function out of the global object's builtins object.
HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin) {
  HGlobalObject* global_object = Add<HGlobalObject>();
  HObjectAccess access = HObjectAccess::ForJSObjectOffset(
      GlobalObject::kBuiltinsOffset);
  HValue* builtins = Add<HLoadNamedField>(global_object, access);
  HObjectAccess function_access = HObjectAccess::ForJSObjectOffset(
      JSBuiltinsObject::OffsetOfFunctionWithId(builtin));
  return Add<HLoadNamedField>(builtins, function_access);
}


HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
    : HGraphBuilder(info),
      function_state_(NULL),
      initial_function_state_(this, info, NORMAL_RETURN),
      ast_context_(NULL),
      break_scope_(NULL),
      inlined_count_(0),
      globals_(10, info->zone()),
      inline_bailout_(false),
      osr_(new(info->zone()) HOsrBuilder(this)) {
  // This is not initialized in the initializer list because the
  // constructor for the initial state relies on function_state_ == NULL
  // to know it's the initial state.
  function_state_= &initial_function_state_;
  InitializeAstVisitor();
}


// Creates (or reuses) a join block for two incoming control-flow edges.
// Either input may be NULL, in which case the other one is returned
// unchanged.
HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
                                                HBasicBlock* second,
                                                BailoutId join_id) {
  if (first == NULL) {
    return second;
  } else if (second == NULL) {
    return first;
  } else {
    HBasicBlock* join_block = graph()->CreateBasicBlock();
    first->Goto(join_block);
    second->Goto(join_block);
    join_block->SetJoinId(join_id);
    return join_block;
  }
}


// Joins the loop-body exit with the block collecting 'continue' edges, if
// the statement has one.
HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
                                                  HBasicBlock* exit_block,
                                                  HBasicBlock* continue_block) {
  if (continue_block != NULL) {
    if (exit_block != NULL) exit_block->Goto(continue_block);
    continue_block->SetJoinId(statement->ContinueId());
    return continue_block;
  }
  return exit_block;
}


// Finishes a loop: wires the back edge, post-processes the loop header, and
// joins the normal exit with the block collecting 'break' edges.  Returns
// the block where control continues after the loop.
HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
                                                HBasicBlock* loop_entry,
                                                HBasicBlock* body_exit,
                                                HBasicBlock* loop_successor,
                                                HBasicBlock* break_block) {
  if (body_exit != NULL) body_exit->Goto(loop_entry);
  loop_entry->PostProcessLoopHeader(statement);
  if (break_block != NULL) {
    if (loop_successor != NULL) loop_successor->Goto(break_block);
    break_block->SetJoinId(statement->ExitId());
    return break_block;
  }
  return loop_successor;
}


// Ends a block with a terminating control instruction and drops the block's
// outgoing environment.
void HBasicBlock::FinishExit(HControlInstruction* instruction) {
  Finish(instruction);
  ClearEnvironment();
}
HGraph::HGraph(CompilationInfo* info)
    : isolate_(info->isolate()),
      next_block_id_(0),
      entry_block_(NULL),
      blocks_(8, info->zone()),
      values_(16, info->zone()),
      phi_list_(NULL),
      uint32_instructions_(NULL),
      osr_(NULL),
      info_(info),
      zone_(info->zone()),
      is_recursive_(false),
      use_optimistic_licm_(false),
      has_soft_deoptimize_(false),
      depends_on_empty_array_proto_elements_(false),
      type_change_checksum_(0),
      maximum_environment_size_(0),
      no_side_effects_scope_count_(0) {
  // Code stubs get the shape of their start environment from the stub's
  // interface descriptor; regular functions derive it from scope and closure.
  if (info->IsStub()) {
    HydrogenCodeStub* stub = info->code_stub();
    CodeStubInterfaceDescriptor* descriptor =
        stub->GetInterfaceDescriptor(isolate_);
    start_environment_ =
        new(zone_) HEnvironment(zone_, descriptor->environment_length());
  } else {
    start_environment_ =
        new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
  }
  start_environment_->set_ast_id(BailoutId::FunctionEntry());
  entry_block_ = CreateBasicBlock();
  entry_block_->SetInitialEnvironment(start_environment_);
}


// Creates a new basic block, registers it with the graph, and returns it.
HBasicBlock* HGraph::CreateBasicBlock() {
  HBasicBlock* result = new(zone()) HBasicBlock(this);
  blocks_.Add(result, zone());
  return result;
}


// Finalizes unique value ids for every instruction in the graph. Asserts
// this does not run on the concurrent optimizer thread, and forbids GC for
// the duration.
void HGraph::FinalizeUniqueValueIds() {
  DisallowHeapAllocation no_gc;
  ASSERT(!isolate()->optimizing_compiler_thread()->IsOptimizerThread());
  for (int i = 0; i < blocks()->length(); ++i) {
    for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
      it.Current()->FinalizeUniqueValueId();
    }
  }
}


// Block ordering was implemented with two mutually recursive methods,
// HGraph::Postorder and HGraph::PostorderLoopBlocks.
// The recursion could lead to stack overflow so the algorithm has been
// implemented iteratively.
// At a high level the algorithm looks like this:
//
// Postorder(block, loop_header) : {
//   if (block has already been visited or is of another loop) return;
//   mark block as visited;
//   if (block is a loop header) {
//     VisitLoopMembers(block, loop_header);
//     VisitSuccessorsOfLoopHeader(block);
//   } else {
//     VisitSuccessors(block)
//   }
//   put block in result list;
// }
//
// VisitLoopMembers(block, outer_loop_header) {
//   foreach (block b in block loop members) {
//     VisitSuccessorsOfLoopMember(b, outer_loop_header);
//     if (b is loop header) VisitLoopMembers(b);
//   }
// }
//
// VisitSuccessorsOfLoopMember(block, outer_loop_header) {
//   foreach (block b in block successors) Postorder(b, outer_loop_header)
// }
//
// VisitSuccessorsOfLoopHeader(block) {
//   foreach (block b in block successors) Postorder(b, block)
// }
//
// VisitSuccessors(block, loop_header) {
//   foreach (block b in block successors) Postorder(b, loop_header)
// }
//
// The ordering is started calling Postorder(entry, NULL).
//
// Each instance of PostorderProcessor represents the "stack frame" of the
// recursion, and particularly keeps the state of the loop (iteration) of the
// "Visit..." function it represents.
// To recycle memory we keep all the frames in a double linked list but
// this means that we cannot use constructors to initialize the frames.
//
class PostorderProcessor : public ZoneObject {
 public:
  // Back link (towards the stack bottom).
  PostorderProcessor* parent() {return father_; }
  // Forward link (towards the stack top).
  PostorderProcessor* child() {return child_; }
  HBasicBlock* block() { return block_; }
  HLoopInformation* loop() { return loop_; }
  HBasicBlock* loop_header() { return loop_header_; }

  // Builds the bottom "stack frame" and starts the traversal at |block|
  // (the graph entry) with no enclosing loop header.
  static PostorderProcessor* CreateEntryProcessor(Zone* zone,
                                                  HBasicBlock* block,
                                                  BitVector* visited) {
    PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
    return result->SetupSuccessors(zone, block, NULL, visited);
  }

  // Performs one step of the iterative traversal: either descend (a
  // non-backtracking step succeeded) or walk back up the frame stack.
  PostorderProcessor* PerformStep(Zone* zone,
                                  BitVector* visited,
                                  ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* next =
        PerformNonBacktrackingStep(zone, visited, order);
    if (next != NULL) {
      return next;
    } else {
      return Backtrack(zone, visited, order);
    }
  }

 private:
  explicit PostorderProcessor(PostorderProcessor* father)
      : father_(father), child_(NULL), successor_iterator(NULL) { }

  // Each enum value states the cycle whose state is kept by this instance.
  enum LoopKind {
    NONE,
    SUCCESSORS,
    SUCCESSORS_OF_LOOP_HEADER,
    LOOP_MEMBERS,
    SUCCESSORS_OF_LOOP_MEMBER
  };

  // Each "Setup..." method is like a constructor for a cycle state.
  PostorderProcessor* SetupSuccessors(Zone* zone,
                                      HBasicBlock* block,
                                      HBasicBlock* loop_header,
                                      BitVector* visited) {
    // Skip blocks that are already visited or belong to a different loop.
    if (block == NULL || visited->Contains(block->block_id()) ||
        block->parent_loop_header() != loop_header) {
      kind_ = NONE;
      block_ = NULL;
      loop_ = NULL;
      loop_header_ = NULL;
      return this;
    } else {
      block_ = block;
      loop_ = NULL;
      visited->Add(block->block_id());

      if (block->IsLoopHeader()) {
        kind_ = SUCCESSORS_OF_LOOP_HEADER;
        loop_header_ = block;
        InitializeSuccessors();
        PostorderProcessor* result = Push(zone);
        return result->SetupLoopMembers(zone, block, block->loop_information(),
                                        loop_header);
      } else {
        ASSERT(block->IsFinished());
        kind_ = SUCCESSORS;
        loop_header_ = loop_header;
        InitializeSuccessors();
        return this;
      }
    }
  }

  PostorderProcessor* SetupLoopMembers(Zone* zone,
                                       HBasicBlock* block,
                                       HLoopInformation* loop,
                                       HBasicBlock* loop_header) {
    kind_ = LOOP_MEMBERS;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeLoopMembers();
    return this;
  }

  PostorderProcessor* SetupSuccessorsOfLoopMember(
      HBasicBlock* block,
      HLoopInformation* loop,
      HBasicBlock* loop_header) {
    kind_ = SUCCESSORS_OF_LOOP_MEMBER;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeSuccessors();
    return this;
  }

  // This method "allocates" a new stack frame.
  PostorderProcessor* Push(Zone* zone) {
    // Frames are recycled: a child frame is only created once and then
    // re-initialized via the Setup... methods.
    if (child_ == NULL) {
      child_ = new(zone) PostorderProcessor(this);
    }
    return child_;
  }

  // Appends block_ to the postorder result, asserting that all its
  // successors are either already ordered or loop headers.
  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
    ASSERT(block_->end()->FirstSuccessor() == NULL ||
           order->Contains(block_->end()->FirstSuccessor()) ||
           block_->end()->FirstSuccessor()->IsLoopHeader());
    ASSERT(block_->end()->SecondSuccessor() == NULL ||
           order->Contains(block_->end()->SecondSuccessor()) ||
           block_->end()->SecondSuccessor()->IsLoopHeader());
    order->Add(block_, zone);
  }

  // This method is the basic block to walk up the stack.
  PostorderProcessor* Pop(Zone* zone,
                          BitVector* visited,
                          ZoneList<HBasicBlock*>* order) {
    switch (kind_) {
      case SUCCESSORS:
      case SUCCESSORS_OF_LOOP_HEADER:
        ClosePostorder(order, zone);
        return father_;
      case LOOP_MEMBERS:
        return father_;
      case SUCCESSORS_OF_LOOP_MEMBER:
        if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
          // In this case we need to perform a LOOP_MEMBERS cycle so we
          // initialize it and return this instead of father.
          return SetupLoopMembers(zone, block(),
                                  block()->loop_information(), loop_header_);
        } else {
          return father_;
        }
      case NONE:
        return father_;
    }
    UNREACHABLE();
    return NULL;
  }

  // Walks up the stack.
  PostorderProcessor* Backtrack(Zone* zone,
                                BitVector* visited,
                                ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* parent = Pop(zone, visited, order);
    while (parent != NULL) {
      PostorderProcessor* next =
          parent->PerformNonBacktrackingStep(zone, visited, order);
      if (next != NULL) {
        return next;
      } else {
        parent = parent->Pop(zone, visited, order);
      }
    }
    return NULL;
  }

  // Advances the cycle this frame represents; returns the next frame to
  // process (a pushed child) or NULL when this frame's cycle is exhausted.
  PostorderProcessor* PerformNonBacktrackingStep(
      Zone* zone,
      BitVector* visited,
      ZoneList<HBasicBlock*>* order) {
    HBasicBlock* next_block;
    switch (kind_) {
      case SUCCESSORS:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block,
                                         loop_header_, visited);
        }
        break;
      case SUCCESSORS_OF_LOOP_HEADER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block,
                                         block(), visited);
        }
        break;
      case LOOP_MEMBERS:
        next_block = AdvanceLoopMembers();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessorsOfLoopMember(next_block,
                                                     loop_, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_MEMBER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block,
                                         loop_header_, visited);
        }
        break;
      case NONE:
        return NULL;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in successors" cycle.
  void InitializeSuccessors() {
    loop_index = 0;
    loop_length = 0;
    successor_iterator = HSuccessorIterator(block_->end());
  }

  HBasicBlock* AdvanceSuccessors() {
    if (!successor_iterator.Done()) {
      HBasicBlock* result = successor_iterator.Current();
      successor_iterator.Advance();
      return result;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in loop members" cycle.
  void InitializeLoopMembers() {
    loop_index = 0;
    loop_length = loop_->blocks()->length();
  }

  HBasicBlock* AdvanceLoopMembers() {
    if (loop_index < loop_length) {
      HBasicBlock* result = loop_->blocks()->at(loop_index);
      loop_index++;
      return result;
    } else {
      return NULL;
    }
  }

  LoopKind kind_;
  PostorderProcessor* father_;
  PostorderProcessor* child_;
  HLoopInformation* loop_;
  HBasicBlock* block_;
  HBasicBlock* loop_header_;
  int loop_index;
  int loop_length;
  HSuccessorIterator successor_iterator;
};


// Orders the graph's blocks in reverse postorder using the iterative
// PostorderProcessor machinery above, then renumbers the block ids to match
// the new order.
void HGraph::OrderBlocks() {
  CompilationPhase phase("H_Block ordering", info());
  BitVector visited(blocks_.length(), zone());

  ZoneList<HBasicBlock*> reverse_result(8, zone());
  HBasicBlock* start = blocks_[0];
  PostorderProcessor* postorder =
      PostorderProcessor::CreateEntryProcessor(zone(), start, &visited);
  while (postorder != NULL) {
    postorder = postorder->PerformStep(zone(), &visited, &reverse_result);
  }
  // The list was produced in postorder; re-add in reverse to get RPO.
  blocks_.Rewind(0);
  int index = 0;
  for (int i = reverse_result.length() - 1; i >= 0; --i) {
    HBasicBlock* b = reverse_result[i];
    blocks_.Add(b, zone());
    b->set_block_id(index++);
  }
}


// Computes the dominator of every block from its predecessors; relies on
// blocks being in reverse postorder (OrderBlocks must have run).
void HGraph::AssignDominators() {
  HPhase phase("H_Assign dominators", this);
  for (int i = 0; i < blocks_.length(); ++i) {
    HBasicBlock* block = blocks_[i];
    if (block->IsLoopHeader()) {
      // Only the first predecessor of a loop header is from outside the loop.
      // All others are back edges, and thus cannot dominate the loop header.
      block->AssignCommonDominator(block->predecessors()->first());
      block->AssignLoopSuccessorDominators();
    } else {
      for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
        blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
      }
    }
  }
}


// Returns false if any phi carries the kIsArguments flag; such graphs are
// rejected (see the kUnsupportedPhiUseOfArguments bailout in Optimize).
bool HGraph::CheckArgumentsPhiUses() {
  int block_count = blocks_.length();
  for (int i = 0; i < block_count; ++i) {
    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
      HPhi* phi = blocks_[i]->phis()->at(j);
      // We don't support phi uses of arguments for now.
      if (phi->CheckFlag(HValue::kIsArguments)) return false;
    }
  }
  return true;
}


// Returns false if any phi operand is the hole constant (which arises from
// an uninitialized const); such graphs are rejected in Optimize.
bool HGraph::CheckConstPhiUses() {
  int block_count = blocks_.length();
  for (int i = 0; i < block_count; ++i) {
    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
      HPhi* phi = blocks_[i]->phis()->at(j);
      // Check for the hole value (from an uninitialized const).
      for (int k = 0; k < phi->OperandCount(); k++) {
        if (phi->OperandAt(k) == GetConstantHole()) return false;
      }
    }
  }
  return true;
}


// Gathers every phi in the graph into phi_list_.
void HGraph::CollectPhis() {
  int block_count = blocks_.length();
  phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
  for (int i = 0; i < block_count; ++i) {
    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
      HPhi* phi = blocks_[i]->phis()->at(j);
      phi_list_->Add(phi, zone());
    }
  }
}


// Implementation of utility class to encapsulate the translation state for
// a (possibly inlined) function.
FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
                             CompilationInfo* info,
                             InliningKind inlining_kind)
    : owner_(owner),
      compilation_info_(info),
      call_context_(NULL),
      inlining_kind_(inlining_kind),
      function_return_(NULL),
      test_context_(NULL),
      entry_(NULL),
      arguments_object_(NULL),
      arguments_elements_(NULL),
      outer_(owner->function_state()) {
  // outer_ == NULL means this is the initial (non-inlined) state; see the
  // HOptimizedGraphBuilder constructor.
  if (outer_ != NULL) {
    // State for an inline function.
    if (owner->ast_context()->IsTest()) {
      HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
      HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
      if_true->MarkAsInlineReturnTarget(owner->current_block());
      if_false->MarkAsInlineReturnTarget(owner->current_block());
      TestContext* outer_test_context = TestContext::cast(owner->ast_context());
      Expression* cond = outer_test_context->condition();
      // The AstContext constructor pushed on the context stack. This newed
      // instance is the reason that AstContext can't be BASE_EMBEDDED.
      test_context_ = new TestContext(owner, cond, if_true, if_false);
    } else {
      function_return_ = owner->graph()->CreateBasicBlock();
      function_return()->MarkAsInlineReturnTarget(owner->current_block());
    }
    // Set this after possibly allocating a new TestContext above.
    call_context_ = owner->ast_context();
  }

  // Push on the state stack.
  owner->set_function_state(this);
}


// Pops this state off the builder's state stack.
FunctionState::~FunctionState() {
  delete test_context_;
  owner_->set_function_state(outer_);
}


// Implementation of utility classes to represent an expression's context in
// the AST.
AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
    : owner_(owner),
      kind_(kind),
      outer_(owner->ast_context()),
      for_typeof_(false) {
  owner->set_ast_context(this);  // Push.
#ifdef DEBUG
  // Remember the environment length so the context destructors can assert
  // the expected stack effect (see ~EffectContext / ~ValueContext below).
  ASSERT(owner->environment()->frame_type() == JS_FUNCTION);
  original_length_ = owner->environment()->length();
#endif
}


AstContext::~AstContext() {
  owner_->set_ast_context(outer_);  // Pop.
}


// An effect context must leave the environment length unchanged.
EffectContext::~EffectContext() {
  ASSERT(owner()->HasStackOverflow() ||
         owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ &&
          owner()->environment()->frame_type() == JS_FUNCTION));
}


// A value context must leave exactly one extra value on the environment.
ValueContext::~ValueContext() {
  ASSERT(owner()->HasStackOverflow() ||
         owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ + 1 &&
          owner()->environment()->frame_type() == JS_FUNCTION));
}


void EffectContext::ReturnValue(HValue* value) {
  // The value is simply ignored.
}


void ValueContext::ReturnValue(HValue* value) {
  // The value is tracked in the bailout environment, and communicated
  // through the environment as the result of the expression.
  if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
    owner()->Bailout(kBadValueContextForArgumentsValue);
  }
  owner()->Push(value);
}


void TestContext::ReturnValue(HValue* value) {
  BuildBranch(value);
}


void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->IsControlInstruction());
  owner()->AddInstruction(instr);
  // Side-effecting instructions need a simulate for deopt support.
  if (instr->HasObservableSideEffects()) {
    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}


// Finishes the current block with |instr| and rejoins both outgoing edges,
// since in an effect context the branch outcome is not used.
void EffectContext::ReturnControl(HControlInstruction* instr,
                                  BailoutId ast_id) {
  ASSERT(!instr->HasObservableSideEffects());
  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, empty_true);
  instr->SetSuccessorAt(1, empty_false);
  owner()->current_block()->Finish(instr);
  HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
  owner()->set_current_block(join);
}


void EffectContext::ReturnContinuation(HIfContinuation* continuation,
                                       BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch, NULL);
  // Only join branches that are actually reachable.
  if (!continuation->IsTrueReachable()) {
    owner()->set_current_block(false_branch);
  } else if (!continuation->IsFalseReachable()) {
    owner()->set_current_block(true_branch);
  } else {
    HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
    owner()->set_current_block(join);
  }
}


void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->IsControlInstruction());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  owner()->AddInstruction(instr);
  owner()->Push(instr);
  if (instr->HasObservableSideEffects()) {
    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}


// Materializes the boolean outcome of |instr|: each successor pushes the
// corresponding constant, then both paths join with the value on top.
void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->HasObservableSideEffects());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
  HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, materialize_true);
  instr->SetSuccessorAt(1, materialize_false);
  owner()->current_block()->Finish(instr);
  owner()->set_current_block(materialize_true);
  owner()->Push(owner()->graph()->GetConstantTrue());
  owner()->set_current_block(materialize_false);
  owner()->Push(owner()->graph()->GetConstantFalse());
  HBasicBlock* join =
      owner()->CreateJoin(materialize_true, materialize_false, ast_id);
  owner()->set_current_block(join);
}


void ValueContext::ReturnContinuation(HIfContinuation* continuation,
                                      BailoutId ast_id) {
  HBasicBlock* materialize_true = NULL;
  HBasicBlock* materialize_false = NULL;
  continuation->Continue(&materialize_true, &materialize_false, NULL);
  if (continuation->IsTrueReachable()) {
    // NOTE(review): set_current_block(materialize_true) appears both before
    // and after the Push; the second call looks redundant — confirm intent.
    owner()->set_current_block(materialize_true);
    owner()->Push(owner()->graph()->GetConstantTrue());
    owner()->set_current_block(materialize_true);
  }
  if (continuation->IsFalseReachable()) {
    owner()->set_current_block(materialize_false);
    owner()->Push(owner()->graph()->GetConstantFalse());
    owner()->set_current_block(materialize_false);
  }
  if (continuation->TrueAndFalseReachable()) {
    HBasicBlock* join =
        owner()->CreateJoin(materialize_true, materialize_false, ast_id);
    owner()->set_current_block(join);
  }
}


void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->IsControlInstruction());
  HOptimizedGraphBuilder* builder = owner();
  builder->AddInstruction(instr);
  // We expect a simulate after every expression with side effects, though
  // this one isn't actually needed (and wouldn't work if it were targeted).
  if (instr->HasObservableSideEffects()) {
    builder->Push(instr);
    builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    builder->Pop();
  }
  BuildBranch(instr);
}


// Routes the two outcomes of |instr| directly to this context's if_true /
// if_false targets via empty blocks (edge-split form).
void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->HasObservableSideEffects());
  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, empty_true);
  instr->SetSuccessorAt(1, empty_false);
  owner()->current_block()->Finish(instr);
  empty_true->Goto(if_true(), owner()->function_state());
  empty_false->Goto(if_false(), owner()->function_state());
  owner()->set_current_block(NULL);
}


void TestContext::ReturnContinuation(HIfContinuation* continuation,
                                     BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch, NULL);
  if (continuation->IsTrueReachable()) {
    true_branch->Goto(if_true(), owner()->function_state());
  }
  if (continuation->IsFalseReachable()) {
    false_branch->Goto(if_false(), owner()->function_state());
  }
  owner()->set_current_block(NULL);
}


void TestContext::BuildBranch(HValue* value) {
  // We expect the graph to be in edge-split form: there is no edge that
  // connects a branch node to a join node. We conservatively ensure that
  // property by always adding an empty block on the outgoing edges of this
  // branch.
  HOptimizedGraphBuilder* builder = owner();
  if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
    builder->Bailout(kArgumentsObjectValueInATestContext);
  }
  HBasicBlock* empty_true = builder->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = builder->graph()->CreateBasicBlock();
  ToBooleanStub::Types expected(condition()->to_boolean_types());
  HBranch* test = new(zone()) HBranch(value, expected, empty_true, empty_false);
  builder->current_block()->Finish(test);

  empty_true->Goto(if_true(), builder->function_state());
  empty_false->Goto(if_false(), builder->function_state());
  builder->set_current_block(NULL);
}


// HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.
#define CHECK_BAILOUT(call)                     \
  do {                                          \
    call;                                       \
    if (HasStackOverflow()) return;             \
  } while (false)


#define CHECK_ALIVE(call)                                       \
  do {                                                          \
    call;                                                       \
    if (HasStackOverflow() || current_block() == NULL) return;  \
  } while (false)


#define CHECK_ALIVE_OR_RETURN(call, value)                            \
  do {                                                                \
    call;                                                             \
    if (HasStackOverflow() || current_block() == NULL) return value;  \
  } while (false)


// Records the bailout reason and flags the builder as failed; the
// stack-overflow flag doubles as the generic "abort compilation" signal.
void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
  current_info()->set_bailout_reason(reason);
  SetStackOverflow();
}


// Visits |expr| for its side effects only; any value is discarded.
void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
  EffectContext for_effect(this);
  Visit(expr);
}


// Visits |expr| and leaves its value on the environment stack.
void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
                                           ArgumentsAllowedFlag flag) {
  ValueContext for_value(this, flag);
  Visit(expr);
}


// Like VisitForValue, but in typeof context (undeclared globals allowed).
void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
for_value.set_for_typeof(true); 2788 Visit(expr); 2789 } 2790 2791 2792 2793 void HOptimizedGraphBuilder::VisitForControl(Expression* expr, 2794 HBasicBlock* true_block, 2795 HBasicBlock* false_block) { 2796 TestContext for_test(this, expr, true_block, false_block); 2797 Visit(expr); 2798 } 2799 2800 2801 void HOptimizedGraphBuilder::VisitArgument(Expression* expr) { 2802 CHECK_ALIVE(VisitForValue(expr)); 2803 Push(Add<HPushArgument>(Pop())); 2804 } 2805 2806 2807 void HOptimizedGraphBuilder::VisitArgumentList( 2808 ZoneList<Expression*>* arguments) { 2809 for (int i = 0; i < arguments->length(); i++) { 2810 CHECK_ALIVE(VisitArgument(arguments->at(i))); 2811 } 2812 } 2813 2814 2815 void HOptimizedGraphBuilder::VisitExpressions( 2816 ZoneList<Expression*>* exprs) { 2817 for (int i = 0; i < exprs->length(); ++i) { 2818 CHECK_ALIVE(VisitForValue(exprs->at(i))); 2819 } 2820 } 2821 2822 2823 bool HOptimizedGraphBuilder::BuildGraph() { 2824 if (current_info()->function()->is_generator()) { 2825 Bailout(kFunctionIsAGenerator); 2826 return false; 2827 } 2828 Scope* scope = current_info()->scope(); 2829 if (scope->HasIllegalRedeclaration()) { 2830 Bailout(kFunctionWithIllegalRedeclaration); 2831 return false; 2832 } 2833 if (scope->calls_eval()) { 2834 Bailout(kFunctionCallsEval); 2835 return false; 2836 } 2837 SetUpScope(scope); 2838 2839 // Add an edge to the body entry. This is warty: the graph's start 2840 // environment will be used by the Lithium translation as the initial 2841 // environment on graph entry, but it has now been mutated by the 2842 // Hydrogen translation of the instructions in the start block. This 2843 // environment uses values which have not been defined yet. These 2844 // Hydrogen instructions will then be replayed by the Lithium 2845 // translation, so they cannot have an environment effect. 
The edge to 2846 // the body's entry block (along with some special logic for the start 2847 // block in HInstruction::InsertAfter) seals the start block from 2848 // getting unwanted instructions inserted. 2849 // 2850 // TODO(kmillikin): Fix this. Stop mutating the initial environment. 2851 // Make the Hydrogen instructions in the initial block into Hydrogen 2852 // values (but not instructions), present in the initial environment and 2853 // not replayed by the Lithium translation. 2854 HEnvironment* initial_env = environment()->CopyWithoutHistory(); 2855 HBasicBlock* body_entry = CreateBasicBlock(initial_env); 2856 current_block()->Goto(body_entry); 2857 body_entry->SetJoinId(BailoutId::FunctionEntry()); 2858 set_current_block(body_entry); 2859 2860 // Handle implicit declaration of the function name in named function 2861 // expressions before other declarations. 2862 if (scope->is_function_scope() && scope->function() != NULL) { 2863 VisitVariableDeclaration(scope->function()); 2864 } 2865 VisitDeclarations(scope->declarations()); 2866 Add<HSimulate>(BailoutId::Declarations()); 2867 2868 HValue* context = environment()->context(); 2869 Add<HStackCheck>(context, HStackCheck::kFunctionEntry); 2870 2871 VisitStatements(current_info()->function()->body()); 2872 if (HasStackOverflow()) return false; 2873 2874 if (current_block() != NULL) { 2875 Add<HReturn>(graph()->GetConstantUndefined()); 2876 set_current_block(NULL); 2877 } 2878 2879 // If the checksum of the number of type info changes is the same as the 2880 // last time this function was compiled, then this recompile is likely not 2881 // due to missing/inadequate type feedback, but rather too aggressive 2882 // optimization. Disable optimistic LICM in that case. 
// (Tail of a function whose start lies before this chunk: it folds the
// unoptimized code's own type-change checksum into the graph's composite
// checksum; optimistic LICM is only enabled when the inlined checksum still
// matches, i.e. no relevant type feedback changed since inlining decisions.)
Handle<Code> unoptimized_code(current_info()->shared_info()->code());
ASSERT(unoptimized_code->kind() == Code::FUNCTION);
Handle<TypeFeedbackInfo> type_info(
    TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
int checksum = type_info->own_type_change_checksum();
int composite_checksum = graph()->update_type_change_checksum(checksum);
graph()->set_use_optimistic_licm(
    !type_info->matches_inlined_type_change_checksum(composite_checksum));
type_info->set_inlined_type_change_checksum(composite_checksum);

// Perform any necessary OSR-specific cleanups or changes to the graph.
osr_->FinishGraph();

return true;
}


// Runs the Hydrogen optimization pipeline over the completed graph.
// Returns false (with *bailout_reason set) when an unsupported phi use is
// detected; returns true on success. The phase ordering below is load-bearing:
// several phases have explicit ordering comments, and flag-guarded phases are
// simply skipped when their flag is off.
bool HGraph::Optimize(BailoutReason* bailout_reason) {
  OrderBlocks();
  AssignDominators();

  // We need to create a HConstant "zero" now so that GVN will fold every
  // zero-valued constant in the graph together.
  // The constant is needed to make idef-based bounds check work: the pass
  // evaluates relations with "zero" and that zero cannot be created after GVN.
  GetConstant0();

#ifdef DEBUG
  // Do a full verify after building the graph and computing dominators.
  Verify(true);
#endif

  if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
    Run<HEnvironmentLivenessAnalysisPhase>();
  }

  Run<HPropagateDeoptimizingMarkPhase>();
  if (!CheckConstPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfConstVariable;
    return false;
  }
  Run<HRedundantPhiEliminationPhase>();
  if (!CheckArgumentsPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfArguments;
    return false;
  }

  // Remove dead code and phis
  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();

  if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();

  CollectPhis();

  if (has_osr()) osr()->FinishOsrValues();

  Run<HInferRepresentationPhase>();

  // Remove HSimulate instructions that have turned out not to be needed
  // after all by folding them into the following HSimulate.
  // This must happen after inferring representations.
  Run<HMergeRemovableSimulatesPhase>();

  Run<HMarkDeoptimizeOnUndefinedPhase>();
  Run<HRepresentationChangesPhase>();

  Run<HInferTypesPhase>();

  // Must be performed before canonicalization to ensure that Canonicalize
  // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
  // zero.
  if (FLAG_opt_safe_uint32_operations) Run<HUint32AnalysisPhase>();

  if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();

  if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();

  if (FLAG_use_range) Run<HRangeAnalysisPhase>();

  Run<HComputeChangeUndefinedToNaN>();
  Run<HComputeMinusZeroChecksPhase>();

  // Eliminate redundant stack checks on backwards branches.
  Run<HStackCheckEliminationPhase>();

  if (FLAG_array_bounds_checks_elimination) {
    Run<HBoundsCheckEliminationPhase>();
  }
  if (FLAG_array_bounds_checks_hoisting) {
    Run<HBoundsCheckHoistingPhase>();
  }
  if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();

  RestoreActualValues();

  return true;
}


// Final Optimize() step: redirects every use of an "informative definition"
// to its underlying actual value, deleting purely informative definitions
// entirely, so codegen never sees the wrapper instructions.
void HGraph::RestoreActualValues() {
  HPhase phase("H_Restore actual values", this);

  for (int block_index = 0; block_index < blocks()->length(); block_index++) {
    HBasicBlock* block = blocks()->at(block_index);

#ifdef DEBUG
    // Phis must already be their own actual value.
    for (int i = 0; i < block->phis()->length(); i++) {
      HPhi* phi = block->phis()->at(i);
      ASSERT(phi->ActualValue() == phi);
    }
#endif

    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instruction = it.Current();
      if (instruction->ActualValue() != instruction) {
        ASSERT(instruction->IsInformativeDefinition());
        if (instruction->IsPurelyInformativeDefinition()) {
          instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
        } else {
          instruction->ReplaceAllUsesWith(instruction->ActualValue());
        }
      }
    }
  }
}


// Pushes |instr| onto the environment's expression stack and emits it into
// the current block.
void HGraphBuilder::PushAndAdd(HInstruction* instr) {
  Push(instr);
  AddInstruction(instr);
}


// Pops the call's arguments off the expression stack (popping yields them in
// reverse evaluation order) and re-emits them as HPushArgument instructions
// in original order. Returns |call| unchanged for caller convenience.
template <class Instruction>
HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
  int count = call->argument_count();
  ZoneList<HValue*> arguments(count, zone());
  for (int i = 0; i < count; ++i) {
    arguments.Add(Pop(), zone());
  }

  // RemoveLast un-reverses the order from the Pop() loop above.
  while (!arguments.is_empty()) {
    Add<HPushArgument>(arguments.RemoveLast());
  }
  return call;
}


// Initializes the builder's environment for |scope|: context, undefined
// constant, parameters (continues past this chunk boundary).
void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
  // First special is HContext.
  // (Continuation of SetUpScope: bind the incoming context, materialize the
  // undefined constant, parameters and the arguments object, and initialize
  // the remaining environment slots.)
  HInstruction* context = Add<HContext>();
  environment()->BindContext(context);

  HConstant* undefined_constant = HConstant::cast(Add<HConstant>(
      isolate()->factory()->undefined_value()));
  graph()->set_undefined_constant(undefined_constant);

  // Create an arguments object containing the initial parameters. Set the
  // initial values of parameters including "this" having parameter index 0.
  ASSERT_EQ(scope->num_parameters() + 1, environment()->parameter_count());
  HArgumentsObject* arguments_object =
      New<HArgumentsObject>(environment()->parameter_count());
  for (int i = 0; i < environment()->parameter_count(); ++i) {
    HInstruction* parameter = Add<HParameter>(i);
    arguments_object->AddArgument(parameter, zone());
    environment()->Bind(i, parameter);
  }
  AddInstruction(arguments_object);
  graph()->SetArgumentsObject(arguments_object);

  // Initialize specials and locals to undefined.
  for (int i = environment()->parameter_count() + 1;
       i < environment()->length();
       ++i) {
    environment()->Bind(i, undefined_constant);
  }

  // Handle the arguments and arguments shadow variables specially (they do
  // not have declarations).
  if (scope->arguments() != NULL) {
    // Only stack-allocated 'arguments' is supported by the optimizer.
    if (!scope->arguments()->IsStackAllocated()) {
      return Bailout(kContextAllocatedArguments);
    }

    environment()->Bind(scope->arguments(),
                        graph()->GetArgumentsObject());
  }
}


// Visits statements in order; stops after a statement that unconditionally
// transfers control (IsJump), since the remainder is unreachable.
void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
  for (int i = 0; i < statements->length(); i++) {
    Statement* stmt = statements->at(i);
    CHECK_ALIVE(Visit(stmt));
    if (stmt->IsJump()) break;
  }
}


void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // Blocks that carry their own scope (e.g. let declarations) are not handled.
  if (stmt->scope() != NULL) {
    return Bailout(kScopedBlock);
  }
  BreakAndContinueInfo break_info(stmt);
  { BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitStatements(stmt->statements()));
  }
  // If some break targeted this block, merge control flow at its break block.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block != NULL) {
    if (current_block() != NULL) current_block()->Goto(break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}


void HOptimizedGraphBuilder::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // Value of the expression is discarded.
  VisitForEffect(stmt->expression());
}


void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
}


void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // Statically-known conditions visit only the taken branch
  // (continues past this chunk boundary).
  if (stmt->condition()->ToBooleanIsTrue()) {
    Add<HSimulate>(stmt->ThenId());
    Visit(stmt->then_statement());
  } else if (stmt->condition()->ToBooleanIsFalse()) {
    Add<HSimulate>(stmt->ElseId());
    Visit(stmt->else_statement());
  } else {
    // General case: branch on the condition and build both arms.
    HBasicBlock* cond_true = graph()->CreateBasicBlock();
    HBasicBlock* cond_false = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));

    // An arm with no predecessor is unreachable and is dropped (NULL).
    if (cond_true->HasPredecessor()) {
      cond_true->SetJoinId(stmt->ThenId());
      set_current_block(cond_true);
      CHECK_BAILOUT(Visit(stmt->then_statement()));
      cond_true = current_block();
    } else {
      cond_true = NULL;
    }

    if (cond_false->HasPredecessor()) {
      cond_false->SetJoinId(stmt->ElseId());
      set_current_block(cond_false);
      CHECK_BAILOUT(Visit(stmt->else_statement()));
      cond_false = current_block();
    } else {
      cond_false = NULL;
    }

    HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
    set_current_block(join);
  }
}


// Finds the jump target for a break/continue aimed at |stmt|: walks the stack
// of enclosing breakable constructs, summing into *drop_extra the number of
// expression-stack entries that must be dropped before the jump. The target
// basic block is created lazily on first request.
HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
    BreakableStatement* stmt,
    BreakType type,
    int* drop_extra) {
  *drop_extra = 0;
  BreakAndContinueScope* current = this;
  while (current != NULL && current->info()->target() != stmt) {
    *drop_extra += current->info()->drop_extra();
    current = current->next();
  }
  ASSERT(current != NULL);  // Always found (unless stack is malformed).

  // A break also leaves the targeted construct itself, so its own extra
  // stack entries are dropped too; a continue stays inside it.
  if (type == BREAK) {
    *drop_extra += current->info()->drop_extra();
  }

  HBasicBlock* block = NULL;
  switch (type) {
    case BREAK:
      block = current->info()->break_block();
      if (block == NULL) {
        block = current->owner()->graph()->CreateBasicBlock();
        current->info()->set_break_block(block);
      }
      break;

    case CONTINUE:
      block = current->info()->continue_block();
      if (block == NULL) {
        block = current->owner()->graph()->CreateBasicBlock();
        current->info()->set_continue_block(block);
      }
      break;
  }

  return block;
}


void HOptimizedGraphBuilder::VisitContinueStatement(
    ContinueStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  int drop_extra = 0;
  HBasicBlock* continue_block = break_scope()->Get(
      stmt->target(), BreakAndContinueScope::CONTINUE, &drop_extra);
  Drop(drop_extra);
  current_block()->Goto(continue_block);
  set_current_block(NULL);  // No fall-through after a continue.
}


void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  int drop_extra = 0;
  HBasicBlock* break_block = break_scope()->Get(
      stmt->target(), BreakAndContinueScope::BREAK, &drop_extra);
  Drop(drop_extra);
  current_block()->Goto(break_block);
  set_current_block(NULL);  // No fall-through after a break.
}


// Builds a return: four cases, selected by whether we are inlining and by the
// kind of inlined call (continues past this chunk boundary).
void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  FunctionState* state = function_state();
  AstContext* context = call_context();
  if (context == NULL) {
    // Not an inlined return, so an actual one.
    CHECK_ALIVE(VisitForValue(stmt->expression()));
    HValue* result = environment()->Pop();
    Add<HReturn>(result);
  } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
    // Return from an inlined construct call. In a test context the return value
    // will always evaluate to true, in a value context the return value needs
    // to be a JSObject.
    if (context->IsTest()) {
      TestContext* test = TestContext::cast(context);
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      current_block()->Goto(test->if_true(), state);
    } else if (context->IsEffect()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      current_block()->Goto(function_return(), state);
    } else {
      ASSERT(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      HValue* return_value = Pop();
      // Construct semantics: keep the returned value only if it is a spec
      // object, otherwise the result is the receiver (argument slot 0).
      HValue* receiver = environment()->arguments_environment()->Lookup(0);
      HHasInstanceTypeAndBranch* typecheck =
          new(zone()) HHasInstanceTypeAndBranch(return_value,
                                                FIRST_SPEC_OBJECT_TYPE,
                                                LAST_SPEC_OBJECT_TYPE);
      HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
      HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
      typecheck->SetSuccessorAt(0, if_spec_object);
      typecheck->SetSuccessorAt(1, not_spec_object);
      current_block()->Finish(typecheck);
      if_spec_object->AddLeaveInlined(return_value, state);
      not_spec_object->AddLeaveInlined(receiver, state);
    }
  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
    // Return from an inlined setter call. The returned value is never used, the
    // value of an assignment is always the value of the RHS of the assignment.
    CHECK_ALIVE(VisitForEffect(stmt->expression()));
    if (context->IsTest()) {
      // The RHS of the assignment is argument slot 1.
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      context->ReturnValue(rhs);
    } else if (context->IsEffect()) {
      current_block()->Goto(function_return(), state);
    } else {
      ASSERT(context->IsValue());
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      current_block()->AddLeaveInlined(rhs, state);
    }
  } else {
    // Return from a normal inlined function. Visit the subexpression in the
    // expression context of the call.
    if (context->IsTest()) {
      TestContext* test = TestContext::cast(context);
      VisitForControl(stmt->expression(), test->if_true(), test->if_false());
    } else if (context->IsEffect()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      current_block()->Goto(function_return(), state);
    } else {
      ASSERT(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      current_block()->AddLeaveInlined(Pop(), state);
    }
  }
  set_current_block(NULL);  // No fall-through after a return.
}


void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kWithStatement);
}


// Builds an optimized switch in two passes: first the chain of label tests,
// then the clause bodies (continues past this chunk boundary).
void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());

  // We only optimize switch statements with smi-literal smi comparisons,
  // with a bounded number of clauses.
  const int kCaseClauseLimit = 128;
  ZoneList<CaseClause*>* clauses = stmt->cases();
  int clause_count = clauses->length();
  if (clause_count > kCaseClauseLimit) {
    return Bailout(kSwitchStatementTooManyClauses);
  }

  ASSERT(stmt->switch_type() != SwitchStatement::UNKNOWN_SWITCH);
  if (stmt->switch_type() == SwitchStatement::GENERIC_SWITCH) {
    return Bailout(kSwitchStatementMixedOrNonLiteralSwitchLabels);
  }

  HValue* context = environment()->context();

  CHECK_ALIVE(VisitForValue(stmt->tag()));
  Add<HSimulate>(stmt->EntryId());
  HValue* tag_value = Pop();
  HBasicBlock* first_test_block = current_block();

  HUnaryControlInstruction* string_check = NULL;
  HBasicBlock* not_string_block = NULL;

  // Test switch's tag value if all clauses are string literals
  if (stmt->switch_type() == SwitchStatement::STRING_SWITCH) {
    string_check = new(zone()) HIsStringAndBranch(tag_value);
    first_test_block = graph()->CreateBasicBlock();
    not_string_block = graph()->CreateBasicBlock();

    string_check->SetSuccessorAt(0, first_test_block);
    string_check->SetSuccessorAt(1, not_string_block);
    current_block()->Finish(string_check);

    set_current_block(first_test_block);
  }

  // 1. Build all the tests, with dangling true branches
  BailoutId default_id = BailoutId::None();
  for (int i = 0; i < clause_count; ++i) {
    CaseClause* clause = clauses->at(i);
    // The default clause gets no test; remember its id for the join below.
    if (clause->is_default()) {
      default_id = clause->EntryId();
      continue;
    }

    // Generate a compare and branch.
    CHECK_ALIVE(VisitForValue(clause->label()));
    HValue* label_value = Pop();

    HBasicBlock* next_test_block = graph()->CreateBasicBlock();
    HBasicBlock* body_block = graph()->CreateBasicBlock();

    HControlInstruction* compare;

    if (stmt->switch_type() == SwitchStatement::SMI_SWITCH) {
      if (!clause->compare_type()->Is(Type::Smi())) {
        Add<HDeoptimize>("Non-smi switch type", Deoptimizer::SOFT);
      }

      HCompareNumericAndBranch* compare_ =
          new(zone()) HCompareNumericAndBranch(tag_value,
                                               label_value,
                                               Token::EQ_STRICT);
      compare_->set_observed_input_representation(
          Representation::Smi(), Representation::Smi());
      compare = compare_;
    } else {
      compare = new(zone()) HStringCompareAndBranch(context, tag_value,
                                                    label_value,
                                                    Token::EQ_STRICT);
    }

    compare->SetSuccessorAt(0, body_block);
    compare->SetSuccessorAt(1, next_test_block);
    current_block()->Finish(compare);

    set_current_block(next_test_block);
  }

  // Save the current block to use for the default or to join with the
  // exit.
  HBasicBlock* last_block = current_block();

  if (not_string_block != NULL) {
    BailoutId join_id = !default_id.IsNone() ? default_id : stmt->ExitId();
    last_block = CreateJoin(last_block, not_string_block, join_id);
  }

  // 2. Loop over the clauses and the linked list of tests in lockstep,
  // translating the clause bodies.
  HBasicBlock* curr_test_block = first_test_block;
  HBasicBlock* fall_through_block = NULL;

  BreakAndContinueInfo break_info(stmt);
  { BreakAndContinueScope push(&break_info, this);
    for (int i = 0; i < clause_count; ++i) {
      CaseClause* clause = clauses->at(i);

      // Identify the block where normal (non-fall-through) control flow
      // goes to.
      HBasicBlock* normal_block = NULL;
      if (clause->is_default()) {
        if (last_block != NULL) {
          normal_block = last_block;
          last_block = NULL;  // Cleared to indicate we've handled it.
        }
      } else {
        // First successor of the test is the body, second the next test.
        normal_block = curr_test_block->end()->FirstSuccessor();
        curr_test_block = curr_test_block->end()->SecondSuccessor();
      }

      // Identify a block to emit the body into.
      if (normal_block == NULL) {
        if (fall_through_block == NULL) {
          // (a) Unreachable.
          if (clause->is_default()) {
            continue;  // Might still be reachable clause bodies.
          } else {
            break;
          }
        } else {
          // (b) Reachable only as fall through.
          set_current_block(fall_through_block);
        }
      } else if (fall_through_block == NULL) {
        // (c) Reachable only normally.
        set_current_block(normal_block);
      } else {
        // (d) Reachable both ways.
        HBasicBlock* join = CreateJoin(fall_through_block,
                                       normal_block,
                                       clause->EntryId());
        set_current_block(join);
      }

      CHECK_BAILOUT(VisitStatements(clause->statements()));
      fall_through_block = current_block();
    }
  }

  // Create an up-to-3-way join. Use the break block if it exists since
  // it's already a join block.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block == NULL) {
    set_current_block(CreateJoin(fall_through_block,
                                 last_block,
                                 stmt->ExitId()));
  } else {
    if (fall_through_block != NULL) fall_through_block->Goto(break_block);
    if (last_block != NULL) last_block->Goto(break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}


// Emits a loop body preceded by a back-edge stack check; the check is
// registered on the loop header so redundant checks can be eliminated later
// (see HStackCheckEliminationPhase in Optimize()).
void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
                                           HBasicBlock* loop_entry,
                                           BreakAndContinueInfo* break_info) {
  BreakAndContinueScope push(break_info, this);
  Add<HSimulate>(stmt->StackCheckId());
  HValue* context = environment()->context();
  HStackCheck* stack_check = HStackCheck::cast(Add<HStackCheck>(
      context, HStackCheck::kBackwardsBranch));
  ASSERT(loop_entry->IsLoopHeader());
  loop_entry->loop_information()->set_stack_check(stack_check);
  CHECK_BAILOUT(Visit(stmt->body()));
}


// do-while: body first, then the condition on the back edge. A constant-true
// condition skips the branch entirely.
void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  ASSERT(current_block() != NULL);
  HBasicBlock* loop_entry = osr_->BuildPossibleOsrLoopEntry(stmt);

  BreakAndContinueInfo break_info(stmt);
  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());
  HBasicBlock* loop_successor = NULL;
  if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
    set_current_block(body_exit);
    // The block for a true condition, the actual predecessor block of the
    // back edge.
    body_exit = graph()->CreateBasicBlock();
    loop_successor = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
    if (body_exit->HasPredecessor()) {
      body_exit->SetJoinId(stmt->BackEdgeId());
    } else {
      body_exit = NULL;  // Back edge unreachable.
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;  // Exit unreachable (condition never false).
    }
  }
  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}


// while: condition first, then body (continues past this chunk boundary).
void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  ASSERT(current_block() != NULL);
  HBasicBlock* loop_entry = osr_->BuildPossibleOsrLoopEntry(stmt);

  // If the condition is constant true, do not generate a branch.
  HBasicBlock* loop_successor = NULL;
  if (!stmt->cond()->ToBooleanIsTrue()) {
    HBasicBlock* body_entry = graph()->CreateBasicBlock();
    loop_successor = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
    if (body_entry->HasPredecessor()) {
      body_entry->SetJoinId(stmt->BodyId());
      set_current_block(body_entry);
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;  // Exit unreachable (condition never false).
    }
  }

  BreakAndContinueInfo break_info(stmt);
  // The body is only built when the condition can actually reach it.
  if (current_block() != NULL) {
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
  }
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());
  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}


// for: init before the loop header, then optional condition, body, and the
// next-expression on the back edge.
void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  if (stmt->init() != NULL) {
    CHECK_ALIVE(Visit(stmt->init()));
  }
  ASSERT(current_block() != NULL);
  HBasicBlock* loop_entry = osr_->BuildPossibleOsrLoopEntry(stmt);

  HBasicBlock* loop_successor = NULL;
  if (stmt->cond() != NULL) {
    HBasicBlock* body_entry = graph()->CreateBasicBlock();
    loop_successor = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
    if (body_entry->HasPredecessor()) {
      body_entry->SetJoinId(stmt->BodyId());
      set_current_block(body_entry);
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;  // Exit unreachable (condition never false).
    }
  }

  BreakAndContinueInfo break_info(stmt);
  if (current_block() != NULL) {
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
  }
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  // The next-expression runs only when the body exits normally.
  if (stmt->next() != NULL && body_exit != NULL) {
    set_current_block(body_exit);
    CHECK_BAILOUT(Visit(stmt->next()));
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}


// for-in: only the fast enum-cache case over a stack-local 'each' variable is
// optimized; everything else bails out (continues past this chunk boundary).
void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());

  if (!FLAG_optimize_for_in) {
    return Bailout(kForInStatementOptimizationIsDisabled);
  }

  if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) {
    return Bailout(kForInStatementIsNotFastCase);
  }

  if (!stmt->each()->IsVariableProxy() ||
      !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
    return Bailout(kForInStatementWithNonLocalEachVariable);
  }

  Variable* each_var = stmt->each()->AsVariableProxy()->var();

  CHECK_ALIVE(VisitForValue(stmt->enumerable()));
  HValue* enumerable = Top();  // Leave enumerable at the top.
  HInstruction* map = Add<HForInPrepareMap>(enumerable);
  Add<HSimulate>(stmt->PrepareId());

  HInstruction* array = Add<HForInCacheArray>(
      enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);

  HInstruction* enum_length = Add<HMapEnumLength>(map);

  HInstruction* start_index = Add<HConstant>(0);

  // Expression stack, on top of |enumerable| (already pushed above):
  // map, enum cache array, enum length (limit), iteration index.
  Push(map);
  Push(array);
  Push(enum_length);
  Push(start_index);

  HInstruction* index_cache = Add<HForInCacheArray>(
      enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
  HForInCacheArray::cast(array)->set_index_cache(
      HForInCacheArray::cast(index_cache));

  HBasicBlock* loop_entry = osr_->BuildPossibleOsrLoopEntry(stmt);

  HValue* index = environment()->ExpressionStackAt(0);
  HValue* limit = environment()->ExpressionStackAt(1);

  // Check that we still have more keys.
  HCompareNumericAndBranch* compare_index =
      new(zone()) HCompareNumericAndBranch(index, limit, Token::LT);
  compare_index->set_observed_input_representation(
      Representation::Smi(), Representation::Smi());

  HBasicBlock* loop_body = graph()->CreateBasicBlock();
  HBasicBlock* loop_successor = graph()->CreateBasicBlock();

  compare_index->SetSuccessorAt(0, loop_body);
  compare_index->SetSuccessorAt(1, loop_successor);
  current_block()->Finish(compare_index);

  set_current_block(loop_successor);
  Drop(5);  // Pop enumerable plus the four loop bookkeeping values.

  set_current_block(loop_body);

  HValue* key = Add<HLoadKeyed>(
      environment()->ExpressionStackAt(2),  // Enum cache.
      environment()->ExpressionStackAt(0),  // Iteration index.
      environment()->ExpressionStackAt(0),
      FAST_ELEMENTS);

  // Check if the expected map still matches that of the enumerable.
  // If not just deoptimize.
  Add<HCheckMapValue>(environment()->ExpressionStackAt(4),
                      environment()->ExpressionStackAt(3));

  Bind(each_var, key);

  // drop_extra of 5 matches the bookkeeping values pushed above.
  BreakAndContinueInfo break_info(stmt, 5);
  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));

  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  if (body_exit != NULL) {
    set_current_block(body_exit);

    // Increment the iteration index on the back edge.
    HValue* current_index = Pop();
    HInstruction* new_index = New<HAdd>(current_index,
                                        graph()->GetConstant1());
    PushAndAdd(new_index);
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());

  set_current_block(loop_exit);
}


void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kForOfStatement);
}


void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kTryCatchStatement);
}


void HOptimizedGraphBuilder::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kTryFinallyStatement);
}


void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kDebuggerStatement);
}


// Scans the relocation info of |unoptimized_code| for an embedded
// SharedFunctionInfo whose start position matches |expr|; returns a null
// handle when none is found (continues past this chunk boundary).
static Handle<SharedFunctionInfo> SearchSharedFunctionInfo(
    Code* unoptimized_code, FunctionLiteral* expr) {
  int start_position = expr->start_position();
  for (RelocIterator it(unoptimized_code); !it.done(); it.next()) {
    RelocInfo* rinfo = it.rinfo();
    if (rinfo->rmode() != RelocInfo::EMBEDDED_OBJECT) continue;
    Object* obj = rinfo->target_object();
    if (obj->IsSharedFunctionInfo()) {
      SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj);
      // Start position uniquely identifies the literal within this code.
      if (shared->start_position() == start_position) {
        return Handle<SharedFunctionInfo>(shared);
      }
    }
  }

  return Handle<SharedFunctionInfo>();
}


void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // Reuse a SharedFunctionInfo already embedded in the unoptimized code for
  // this literal if present; otherwise compile a fresh one.
  Handle<SharedFunctionInfo> shared_info =
      SearchSharedFunctionInfo(current_info()->shared_info()->code(), expr);
  if (shared_info.is_null()) {
    shared_info = Compiler::BuildFunctionInfo(expr, current_info()->script());
  }
  // We also have a stack overflow if the recursive compilation did.
  if (HasStackOverflow()) return;
  HValue* context = environment()->context();
  HFunctionLiteral* instr =
      new(zone()) HFunctionLiteral(context, shared_info, expr->pretenure());
  return ast_context()->ReturnInstruction(instr, expr->id());
}


void HOptimizedGraphBuilder::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kSharedFunctionInfoLiteral);
}


// Ternary (?:) expression: branch on the condition, build both arms
// (continues past this chunk boundary).
void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  HBasicBlock* cond_true = graph()->CreateBasicBlock();
  HBasicBlock* cond_false = graph()->CreateBasicBlock();
  CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));

  // Visit the true and false subexpressions in the same AST context as the
  // whole expression.
  // An arm with no predecessor is unreachable and is dropped (NULL).
  if (cond_true->HasPredecessor()) {
    cond_true->SetJoinId(expr->ThenId());
    set_current_block(cond_true);
    CHECK_BAILOUT(Visit(expr->then_expression()));
    cond_true = current_block();
  } else {
    cond_true = NULL;
  }

  if (cond_false->HasPredecessor()) {
    cond_false->SetJoinId(expr->ElseId());
    set_current_block(cond_false);
    CHECK_BAILOUT(Visit(expr->else_expression()));
    cond_false = current_block();
  } else {
    cond_false = NULL;
  }

  // In a test context the arms have already branched; otherwise join them
  // and return the value left on the expression stack.
  if (!ast_context()->IsTest()) {
    HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
    set_current_block(join);
    if (join != NULL && !ast_context()->IsEffect()) {
      return ast_context()->ReturnValue(Pop());
    }
  }
}


// Decides whether a global variable access may use its property cell directly
// (kUseCell) or must go through the generic path (kUseGeneric). A store to a
// read-only property, a non-normal lookup, or a holder other than the global
// object itself all force the generic path.
HOptimizedGraphBuilder::GlobalPropertyAccess
    HOptimizedGraphBuilder::LookupGlobalProperty(
        Variable* var, LookupResult* lookup, bool is_store) {
  if (var->is_this() || !current_info()->has_global_object()) {
    return kUseGeneric;
  }
  Handle<GlobalObject> global(current_info()->global_object());
  global->Lookup(*var->name(), lookup);
  if (!lookup->IsNormal() ||
      (is_store && lookup->IsReadOnly()) ||
      lookup->holder() != *global) {
    return kUseGeneric;
  }

  return kUseCell;
}


// Emits one HOuterContext per scope level between the current context and the
// context that holds |var|, returning the target context value.
HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
  ASSERT(var->IsContextSlot());
  HValue* context = environment()->context();
  int length = current_info()->scope()->ContextChainLength(var->scope());
  while (length-- > 0) {
    context = Add<HOuterContext>(context);
  }
  return context;
}


// Variable reads, dispatched on storage location
// (continues past this chunk boundary).
void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Variable* variable = expr->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      if (IsLexicalVariableMode(variable->mode())) {
        // TODO(rossberg): should this be an ASSERT?
        return Bailout(kReferenceToGlobalLexicalVariable);
      }
      // Handle known global constants like 'undefined' specially to avoid a
      // load from a global cell for them.
      Handle<Object> constant_value =
          isolate()->factory()->GlobalConstantFor(variable->name());
      if (!constant_value.is_null()) {
        HConstant* instr = New<HConstant>(constant_value);
        return ast_context()->ReturnInstruction(instr, expr->id());
      }

      LookupResult lookup(isolate());
      GlobalPropertyAccess type =
          LookupGlobalProperty(variable, &lookup, false);

      // Access-checked globals cannot be read through the cell directly.
      if (type == kUseCell &&
          current_info()->global_object()->IsAccessCheckNeeded()) {
        type = kUseGeneric;
      }

      if (type == kUseCell) {
        Handle<GlobalObject> global(current_info()->global_object());
        Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
        if (cell->type()->IsConstant()) {
          // Embed the cell's constant directly, registering a dependency so
          // the optimized code is invalidated if the cell's value changes.
          cell->AddDependentCompilationInfo(top_info());
          Handle<Object> constant_object = cell->type()->AsConstant();
          if (constant_object->IsConsString()) {
            constant_object =
                FlattenGetString(Handle<String>::cast(constant_object));
          }
          HConstant* constant = New<HConstant>(constant_object);
          return ast_context()->ReturnInstruction(constant, expr->id());
        } else {
          HLoadGlobalCell* instr =
              new(zone()) HLoadGlobalCell(cell, lookup.GetPropertyDetails());
          return ast_context()->ReturnInstruction(instr, expr->id());
        }
      } else {
        HValue* context = environment()->context();
        HGlobalObject* global_object = new(zone()) HGlobalObject(context);
        AddInstruction(global_object);
        HLoadGlobalGeneric* instr =
            new(zone()) HLoadGlobalGeneric(context,
                                           global_object,
                                           variable->name(),
                                           ast_context()->is_for_typeof());
        instr->set_position(expr->position());
        return ast_context()->ReturnInstruction(instr, expr->id());
      }
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      HValue* value = LookupAndMakeLive(variable);
      if (value == graph()->GetConstantHole()) {
        // The hole marks a declared non-VAR binding that is still
        // uninitialized at this point.
        ASSERT(IsDeclaredVariableMode(variable->mode()) &&
               variable->mode() != VAR);
        return Bailout(kReferenceToUninitializedVariable);
      }
      return ast_context()->ReturnValue(value);
    }

    case Variable::CONTEXT: {
      HValue* context = BuildContextChainWalk(variable);
      HLoadContextSlot* instr = new(zone()) HLoadContextSlot(context, variable);
      return ast_context()->ReturnInstruction(instr, expr->id());
    }

    case Variable::LOOKUP:
      return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
  }
}


void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  HConstant* instr = New<HConstant>(expr->value());
  return ast_context()->ReturnInstruction(instr, expr->id());
}


void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
  Handle<FixedArray> literals(closure->literals());
  HValue* context = environment()->context();

  HRegExpLiteral* instr = new(zone()) HRegExpLiteral(context,
                                                     literals,
                                                     expr->pattern(),
                                                     expr->flags(),
                                                     expr->literal_index());
  return ast_context()->ReturnInstruction(instr, expr->id());
}


// Looks up |name| in the descriptors of each JSObject prototype of |map| in
// turn; stops at the first prototype without fast properties. Leaves |lookup|
// as NotFound when the chain is exhausted.
static void LookupInPrototypes(Handle<Map> map,
                               Handle<String> name,
                               LookupResult* lookup) {
  while (map->prototype()->IsJSObject()) {
    Handle<JSObject> holder(JSObject::cast(map->prototype()));
    if (!holder->HasFastProperties()) break;
    map = Handle<Map>(holder->map());
    map->LookupDescriptor(*holder, *name, lookup);
    if (lookup->IsFound()) return;
  }
  lookup->NotFound();
}


// Tries to find a JavaScript accessor of the given name in the prototype chain
// starting at the given map. Return true iff there is one, including the
// corresponding AccessorPair plus its holder (which could be null when the
// accessor is found directly in the given map).
// (Continues past this chunk boundary.)
static bool LookupAccessorPair(Handle<Map> map,
                               Handle<String> name,
                               Handle<AccessorPair>* accessors,
                               Handle<JSObject>* holder) {
  Isolate* isolate = map->GetIsolate();
  LookupResult lookup(isolate);

  // Check for a JavaScript accessor directly in the map.
  map->LookupDescriptor(NULL, *name, &lookup);
  if (lookup.IsPropertyCallbacks()) {
    Handle<Object> callback(lookup.GetValueFromMap(*map), isolate);
    if (!callback->IsAccessorPair()) return false;
    *accessors = Handle<AccessorPair>::cast(callback);
    *holder = Handle<JSObject>();  // Accessor lives directly in |map|.
    return true;
  }

  // Everything else, e.g. a field, can't be an accessor call.
  if (lookup.IsFound()) return false;

  // Check for a JavaScript accessor somewhere in the proto chain.
  LookupInPrototypes(map, name, &lookup);
  if (lookup.IsPropertyCallbacks()) {
    Handle<Object> callback(lookup.GetValue(), isolate);
    if (!callback->IsAccessorPair()) return false;
    *accessors = Handle<AccessorPair>::cast(callback);
    *holder = Handle<JSObject>(lookup.holder());
    return true;
  }

  // We haven't found a JavaScript accessor anywhere.
4015 return false; 4016 } 4017 4018 4019 static bool LookupGetter(Handle<Map> map, 4020 Handle<String> name, 4021 Handle<JSFunction>* getter, 4022 Handle<JSObject>* holder) { 4023 Handle<AccessorPair> accessors; 4024 if (LookupAccessorPair(map, name, &accessors, holder) && 4025 accessors->getter()->IsJSFunction()) { 4026 *getter = Handle<JSFunction>(JSFunction::cast(accessors->getter())); 4027 return true; 4028 } 4029 return false; 4030 } 4031 4032 4033 static bool LookupSetter(Handle<Map> map, 4034 Handle<String> name, 4035 Handle<JSFunction>* setter, 4036 Handle<JSObject>* holder) { 4037 Handle<AccessorPair> accessors; 4038 if (LookupAccessorPair(map, name, &accessors, holder) && 4039 accessors->setter()->IsJSFunction()) { 4040 *setter = Handle<JSFunction>(JSFunction::cast(accessors->setter())); 4041 return true; 4042 } 4043 return false; 4044 } 4045 4046 4047 // Determines whether the given array or object literal boilerplate satisfies 4048 // all limits to be considered for fast deep-copying and computes the total 4049 // size of all objects that are part of the graph. 
// Recursively checks a literal boilerplate against the fast-copy limits
// (|max_depth| nesting levels, |*max_properties| total properties) and
// accumulates the byte sizes needed for an inline deep copy: |*data_size|
// for unboxed payload (double arrays, heap numbers) and |*pointer_size| for
// tagged storage (objects, fixed arrays). Returns false as soon as any limit
// is exceeded or an unsupported shape is seen.
static bool IsFastLiteral(Handle<JSObject> boilerplate,
                          int max_depth,
                          int* max_properties,
                          int* data_size,
                          int* pointer_size) {
  // Migrate a deprecated-map boilerplate first; a Smi result signals the
  // migration failed, so the literal cannot be fast-copied.
  if (boilerplate->map()->is_deprecated()) {
    Handle<Object> result = JSObject::TryMigrateInstance(boilerplate);
    if (result->IsSmi()) return false;
  }

  ASSERT(max_depth >= 0 && *max_properties >= 0);
  if (max_depth == 0) return false;

  Isolate* isolate = boilerplate->GetIsolate();
  Handle<FixedArrayBase> elements(boilerplate->elements());
  // COW element stores are shared, not copied, so only non-COW backing
  // stores contribute to the size budget.
  if (elements->length() > 0 &&
      elements->map() != isolate->heap()->fixed_cow_array_map()) {
    if (boilerplate->HasFastDoubleElements()) {
      *data_size += FixedDoubleArray::SizeFor(elements->length());
    } else if (boilerplate->HasFastObjectElements()) {
      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
      int length = elements->length();
      for (int i = 0; i < length; i++) {
        if ((*max_properties)-- == 0) return false;
        Handle<Object> value(fast_elements->get(i), isolate);
        if (value->IsJSObject()) {
          Handle<JSObject> value_object = Handle<JSObject>::cast(value);
          if (!IsFastLiteral(value_object,
                             max_depth - 1,
                             max_properties,
                             data_size,
                             pointer_size)) {
            return false;
          }
        }
      }
      *pointer_size += FixedArray::SizeFor(length);
    } else {
      return false;
    }
  }

  // Only in-object (descriptor-based) properties are supported; any
  // out-of-object properties array disqualifies the boilerplate.
  Handle<FixedArray> properties(boilerplate->properties());
  if (properties->length() > 0) {
    return false;
  } else {
    Handle<DescriptorArray> descriptors(
        boilerplate->map()->instance_descriptors());
    int limit = boilerplate->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < limit; i++) {
      PropertyDetails details = descriptors->GetDetails(i);
      if (details.type() != FIELD) continue;
      Representation representation = details.representation();
      int index = descriptors->GetFieldIndex(i);
      if ((*max_properties)-- == 0) return false;
      Handle<Object> value(boilerplate->InObjectPropertyAt(index), isolate);
      if (value->IsJSObject()) {
        Handle<JSObject> value_object = Handle<JSObject>::cast(value);
        if (!IsFastLiteral(value_object,
                           max_depth - 1,
                           max_properties,
                           data_size,
                           pointer_size)) {
          return false;
        }
      } else if (representation.IsDouble()) {
        // Double fields are boxed in mutable HeapNumbers on copy.
        *data_size += HeapNumber::kSize;
      }
    }
  }

  *pointer_size += boilerplate->map()->instance_size();
  return true;
}


// Compiles an object literal: materializes the boilerplate (inline fast
// deep-copy when IsFastLiteral allows it, otherwise a runtime call) and then
// emits stores for all non-compile-time property values.
void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
  HValue* context = environment()->context();
  HInstruction* literal;

  // Check whether to use fast or slow deep-copying for boilerplate.
  int data_size = 0;
  int pointer_size = 0;
  int max_properties = kMaxFastLiteralProperties;
  Handle<Object> original_boilerplate(closure->literals()->get(
      expr->literal_index()), isolate());
  if (original_boilerplate->IsJSObject() &&
      IsFastLiteral(Handle<JSObject>::cast(original_boilerplate),
                    kMaxFastLiteralDepth,
                    &max_properties,
                    &data_size,
                    &pointer_size)) {
    Handle<JSObject> original_boilerplate_object =
        Handle<JSObject>::cast(original_boilerplate);
    Handle<JSObject> boilerplate_object =
        DeepCopy(original_boilerplate_object);

    literal = BuildFastLiteral(context,
                               boilerplate_object,
                               original_boilerplate_object,
                               Handle<Object>::null(),
                               data_size,
                               pointer_size,
                               DONT_TRACK_ALLOCATION_SITE);
  } else {
    // Slow path: call into the runtime with (literals, index, constant
    // properties, flags) pushed as arguments.
    NoObservableSideEffectsScope no_effects(this);
    Handle<FixedArray> closure_literals(closure->literals(), isolate());
    Handle<FixedArray> constant_properties = expr->constant_properties();
    int literal_index = expr->literal_index();
    int flags = expr->fast_elements()
        ? ObjectLiteral::kFastElements : ObjectLiteral::kNoFlags;
    flags |= expr->has_function()
        ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags;

    Add<HPushArgument>(Add<HConstant>(closure_literals));
    Add<HPushArgument>(Add<HConstant>(literal_index));
    Add<HPushArgument>(Add<HConstant>(constant_properties));
    Add<HPushArgument>(Add<HConstant>(flags));

    Runtime::FunctionId function_id =
        (expr->depth() > 1 || expr->may_store_doubles())
        ? Runtime::kCreateObjectLiteral : Runtime::kCreateObjectLiteralShallow;
    literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
                                Runtime::FunctionForId(function_id),
                                4);
  }

  // The object is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);

  expr->CalculateEmitStore(zone());

  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    // Compile-time values are already present in the boilerplate copy.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();

    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            CHECK_ALIVE(VisitForValue(value));
            HValue* value = Pop();
            Handle<Map> map = property->GetReceiverType();
            Handle<String> name = property->key()->AsPropertyName();
            HInstruction* store;
            if (map.is_null()) {
              // If we don't know the monomorphic type, do a generic store.
              CHECK_ALIVE(store = BuildStoreNamedGeneric(literal, name, value));
            } else {
#if DEBUG
              Handle<JSFunction> setter;
              Handle<JSObject> holder;
              ASSERT(!LookupSetter(map, name, &setter, &holder));
#endif
              CHECK_ALIVE(store = BuildStoreNamedMonomorphic(literal,
                                                             name,
                                                             value,
                                                             map));
            }
            AddInstruction(store);
            if (store->HasObservableSideEffects()) {
              Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);
            }
          } else {
            // A later duplicate key wins; still evaluate for side effects.
            CHECK_ALIVE(VisitForEffect(value));
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
      case ObjectLiteral::Property::SETTER:
      case ObjectLiteral::Property::GETTER:
        return Bailout(kObjectLiteralWithComplexProperty);
      default: UNREACHABLE();
    }
  }

  if (expr->has_function()) {
    // Return the result of the transformation to fast properties
    // instead of the original since this operation changes the map
    // of the object. This makes sure that the original object won't
    // be used by other optimized code before it is transformed
    // (e.g. because of code motion).
    HToFastProperties* result = Add<HToFastProperties>(Pop());
    return ast_context()->ReturnValue(result);
  } else {
    return ast_context()->ReturnValue(Pop());
  }
}


// Compiles an array literal: (lazily) creates the boilerplate and its
// AllocationSite on first execution, copies the boilerplate (inline or via
// runtime call), then emits keyed stores for the non-compile-time elements.
void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  HValue* context = environment()->context();
  HInstruction* literal;

  Handle<AllocationSite> site;
  Handle<FixedArray> literals(environment()->closure()->literals(), isolate());
  bool uninitialized = false;
  Handle<Object> literals_cell(literals->get(expr->literal_index()),
                               isolate());
  Handle<Object> raw_boilerplate;
  if (literals_cell->IsUndefined()) {
    // First execution of this literal: build the boilerplate now and cache
    // an AllocationSite for it in the literals array.
    uninitialized = true;
    raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate(
        isolate(), literals, expr->constant_elements());
    if (raw_boilerplate.is_null()) {
      return Bailout(kArrayBoilerplateCreationFailed);
    }

    site = isolate()->factory()->NewAllocationSite();
    site->set_transition_info(*raw_boilerplate);
    literals->set(expr->literal_index(), *site);

    if (JSObject::cast(*raw_boilerplate)->elements()->map() ==
        isolate()->heap()->fixed_cow_array_map()) {
      isolate()->counters()->cow_arrays_created_runtime()->Increment();
    }
  } else {
    ASSERT(literals_cell->IsAllocationSite());
    site = Handle<AllocationSite>::cast(literals_cell);
    raw_boilerplate = Handle<Object>(site->transition_info(), isolate());
  }

  ASSERT(!raw_boilerplate.is_null());
  ASSERT(site->IsLiteralSite());

  Handle<JSObject> original_boilerplate_object =
      Handle<JSObject>::cast(raw_boilerplate);
  ElementsKind boilerplate_elements_kind =
      Handle<JSObject>::cast(original_boilerplate_object)->GetElementsKind();

  // TODO(mvstanton): This heuristic is only a temporary solution.  In the
  // end, we want to quit creating allocation site info after a certain number
  // of GCs for a call site.
  AllocationSiteMode mode = AllocationSite::GetMode(
      boilerplate_elements_kind);

  // Check whether to use fast or slow deep-copying for boilerplate.
  int data_size = 0;
  int pointer_size = 0;
  int max_properties = kMaxFastLiteralProperties;
  HCheckMaps* type_check = NULL;
  if (IsFastLiteral(original_boilerplate_object,
                    kMaxFastLiteralDepth,
                    &max_properties,
                    &data_size,
                    &pointer_size)) {
    if (mode == TRACK_ALLOCATION_SITE) {
      // Reserve room for the trailing AllocationMemento.
      pointer_size += AllocationMemento::kSize;
    }

    Handle<JSObject> boilerplate_object = DeepCopy(original_boilerplate_object);
    literal = BuildFastLiteral(context,
                               boilerplate_object,
                               original_boilerplate_object,
                               site,
                               data_size,
                               pointer_size,
                               mode);
  } else {
    NoObservableSideEffectsScope no_effects(this);
    // Boilerplate already exists and constant elements are never accessed,
    // pass an empty fixed array to the runtime function instead.
    Handle<FixedArray> constants = isolate()->factory()->empty_fixed_array();
    int literal_index = expr->literal_index();

    Add<HPushArgument>(Add<HConstant>(literals));
    Add<HPushArgument>(Add<HConstant>(literal_index));
    Add<HPushArgument>(Add<HConstant>(constants));

    Runtime::FunctionId function_id = (expr->depth() > 1)
        ? Runtime::kCreateArrayLiteral : Runtime::kCreateArrayLiteralShallow;
    literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
                                Runtime::FunctionForId(function_id),
                                3);

    // De-opt if elements kind changed from boilerplate_elements_kind.
    Handle<Map> map = Handle<Map>(original_boilerplate_object->map(),
                                  isolate());
    type_check = Add<HCheckMaps>(literal, map, top_info());
  }

  // The array is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);
  // The literal index is on the stack, too.
  Push(Add<HConstant>(expr->literal_index()));

  HInstruction* elements = NULL;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    CHECK_ALIVE(VisitForValue(subexpr));
    HValue* value = Pop();
    if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);

    // Reload the backing store on every iteration; the subexpression may
    // have had side effects that invalidated the previous load.
    elements = AddLoadElements(literal, type_check);

    HValue* key = Add<HConstant>(i);

    switch (boilerplate_elements_kind) {
      case FAST_SMI_ELEMENTS:
      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_HOLEY_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_HOLEY_DOUBLE_ELEMENTS: {
        HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value,
                                              boilerplate_elements_kind);
        instr->SetUninitialized(uninitialized);
        break;
      }
      default:
        UNREACHABLE();
        break;
    }

    Add<HSimulate>(expr->GetIdForElement(i));
  }

  Drop(1);  // array literal index
  return ast_context()->ReturnValue(Pop());
}


// Sets the lookup result and returns true if the load/store can be inlined.
// Looks up |name| on |type| and reports via |lookup| whether a direct field
// access can be inlined: an existing field always can; for stores, a
// transition to a new field also qualifies if the object has spare property
// slots. Interceptors and (for stores) observed maps disqualify the access.
static bool ComputeLoadStoreField(Handle<Map> type,
                                  Handle<String> name,
                                  LookupResult* lookup,
                                  bool is_store) {
  ASSERT(!is_store || !type->is_observed());
  if (type->has_named_interceptor()) {
    lookup->InterceptorResult(NULL);
    return false;
  }
  // If we directly find a field, the access can be inlined.
  type->LookupDescriptor(NULL, *name, lookup);
  if (lookup->IsField()) return true;

  // For a load, we are out of luck if there is no such field.
  if (!is_store) return false;

  // 2nd chance: A store into a non-existent field can still be inlined if we
  // have a matching transition and some room left in the object.
  type->LookupTransition(NULL, *name, lookup);
  return lookup->IsTransitionToField(*type) &&
      (type->unused_property_fields() > 0);
}


// Emits a heap-object check followed by a single-map check on |object|.
HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
                                                Handle<Map> map) {
  BuildCheckHeapObject(object);
  return Add<HCheckMaps>(object, map, top_info());
}


// Builds (but does not add) an inlined named-field store, including prototype
// chain checks for transitioning stores, double-field boxing, and the map
// transition itself. Returns NULL after signalling a bailout when the
// prototype chain makes the store unsafe to inline.
HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
    HValue* object,
    Handle<String> name,
    HValue* value,
    Handle<Map> map,
    LookupResult* lookup) {
  ASSERT(lookup->IsFound());
  // If the property does not exist yet, we have to check that it wasn't made
  // readonly or turned into a setter by some meanwhile modifications on the
  // prototype chain.
  if (!lookup->IsProperty() && map->prototype()->IsJSReceiver()) {
    Object* proto = map->prototype();
    // First check that the prototype chain isn't affected already.
    LookupResult proto_result(isolate());
    proto->Lookup(*name, &proto_result);
    if (proto_result.IsProperty()) {
      // If the inherited property could induce readonly-ness, bail out.
      if (proto_result.IsReadOnly() || !proto_result.IsCacheable()) {
        Bailout(kImproperObjectOnPrototypeChainForStore);
        return NULL;
      }
      // We only need to check up to the preexisting property.
      proto = proto_result.holder();
    } else {
      // Otherwise, find the top prototype.
      while (proto->GetPrototype(isolate())->IsJSObject()) {
        proto = proto->GetPrototype(isolate());
      }
      ASSERT(proto->GetPrototype(isolate())->IsNull());
    }
    ASSERT(proto->IsJSObject());
    BuildCheckPrototypeMaps(
        Handle<JSObject>(JSObject::cast(map->prototype())),
        Handle<JSObject>(JSObject::cast(proto)));
  }

  HObjectAccess field_access = HObjectAccess::ForField(map, lookup, name);
  bool transition_to_field = lookup->IsTransitionToField(*map);

  HStoreNamedField *instr;
  if (FLAG_track_double_fields && field_access.representation().IsDouble()) {
    // Double fields are stored boxed in a HeapNumber; access the box with a
    // tagged representation and write the raw double into its value field.
    HObjectAccess heap_number_access =
        field_access.WithRepresentation(Representation::Tagged());
    if (transition_to_field) {
      // The store requires a mutable HeapNumber to be allocated.
      NoObservableSideEffectsScope no_side_effects(this);
      HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);
      HInstruction* heap_number = Add<HAllocate>(heap_number_size,
          HType::HeapNumber(), isolate()->heap()->GetPretenureMode(),
          HEAP_NUMBER_TYPE);
      AddStoreMapConstant(heap_number, isolate()->factory()->heap_number_map());
      Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                            value);
      instr = New<HStoreNamedField>(object, heap_number_access,
                                    heap_number);
    } else {
      // Already holds a HeapNumber; load the box and write its value field.
      HInstruction* heap_number = Add<HLoadNamedField>(object,
                                                       heap_number_access);
      heap_number->set_type(HType::HeapNumber());
      instr = New<HStoreNamedField>(heap_number,
                                    HObjectAccess::ForHeapNumberValue(),
                                    value);
    }
  } else {
    // This is a normal store.
    instr = New<HStoreNamedField>(object, field_access, value);
  }

  if (transition_to_field) {
    Handle<Map> transition(lookup->GetTransitionMapFromMap(*map));
    HConstant* transition_constant = Add<HConstant>(transition);
    instr->SetTransition(transition_constant, top_info());
    // TODO(fschneider): Record the new map type of the object in the IR to
    // enable elimination of redundant checks after the transition store.
    instr->SetGVNFlag(kChangesMaps);
  }
  return instr;
}


// Builds a generic (IC-based) named store using the function's strict mode.
HInstruction* HOptimizedGraphBuilder::BuildStoreNamedGeneric(
    HValue* object,
    Handle<String> name,
    HValue* value) {
  HValue* context = environment()->context();
  return new(zone()) HStoreNamedGeneric(
                         context,
                         object,
                         name,
                         value,
                         function_strict_mode_flag());
}


// Builds a named store for a known receiver map: an inlined field store when
// ComputeLoadStoreField allows it (guarded by a map check), otherwise a
// generic store.
HInstruction* HOptimizedGraphBuilder::BuildStoreNamedMonomorphic(
    HValue* object,
    Handle<String> name,
    HValue* value,
    Handle<Map> map) {
  // Handle a store to a known field.
  LookupResult lookup(isolate());
  if (ComputeLoadStoreField(map, name, &lookup, true)) {
    AddCheckMap(object, map);
    return BuildStoreNamedField(object, name, value, map, &lookup);
  }

  // No luck, do a generic store.
  return BuildStoreNamedGeneric(object, name, value);
}


// Returns true when |map| has no interceptor, is not a dictionary map, and
// does not itself define |name| -- i.e. a load of |name| would have to come
// from the prototype chain.
static bool CanLoadPropertyFromPrototype(Handle<Map> map,
                                         Handle<Name> name,
                                         LookupResult* lookup) {
  if (map->has_named_interceptor()) return false;
  if (map->is_dictionary_map()) return false;
  map->LookupDescriptor(NULL, *name, lookup);
  if (lookup->IsFound()) return false;
  return true;
}


HInstruction* HOptimizedGraphBuilder::TryLoadPolymorphicAsMonomorphic(
    Property* expr,
    HValue* object,
    SmallMapList* types,
    Handle<String> name) {
  // Use monomorphic load if property lookup results in the same field index
  // for all maps. Requires special map check on the set of all handled maps.
  if (types->length() > kMaxLoadPolymorphism) return NULL;

  LookupResult lookup(isolate());
  int count;
  HObjectAccess access = HObjectAccess::ForMap();  // initial value unused.
  for (count = 0; count < types->length(); ++count) {
    Handle<Map> map = types->at(count);
    if (!ComputeLoadStoreField(map, name, &lookup, false)) break;

    HObjectAccess new_access = HObjectAccess::ForField(map, &lookup, name);

    if (count == 0) {
      // First time through the loop; set access and representation.
      access = new_access;
    } else if (!access.representation().IsCompatibleForLoad(
        new_access.representation())) {
      // Representations did not match.
      break;
    } else if (access.offset() != new_access.offset()) {
      // Offsets did not match.
      break;
    } else if (access.IsInobject() != new_access.IsInobject()) {
      // In-objectness did not match.
      break;
    }
    // Widen the access to cover both representations seen so far.
    access = access.WithRepresentation(
        access.representation().generalize(new_access.representation()));
  }

  if (count == types->length()) {
    // Everything matched; can use monomorphic load.
    BuildCheckHeapObject(object);
    HCheckMaps* type_check = Add<HCheckMaps>(object, types);
    return BuildLoadNamedField(object, access, type_check);
  }

  if (count != 0) return NULL;

  // Second chance: the property is on the prototype and all maps have the
  // same prototype.
  Handle<Map> map(types->at(0));
  if (!CanLoadPropertyFromPrototype(map, name, &lookup)) return NULL;

  Handle<Object> prototype(map->prototype(), isolate());
  for (count = 1; count < types->length(); ++count) {
    Handle<Map> test_map(types->at(count));
    if (!CanLoadPropertyFromPrototype(test_map, name, &lookup)) return NULL;
    if (test_map->prototype() != *prototype) return NULL;
  }

  LookupInPrototypes(map, name, &lookup);
  if (!lookup.IsField()) return NULL;

  BuildCheckHeapObject(object);
  HCheckMaps* type_check = Add<HCheckMaps>(object, types);

  Handle<JSObject> holder(lookup.holder());
  Handle<Map> holder_map(holder->map());
  BuildCheckPrototypeMaps(Handle<JSObject>::cast(prototype), holder);
  HValue* holder_value = Add<HConstant>(holder);
  return BuildLoadNamedField(holder_value,
      HObjectAccess::ForField(holder_map, &lookup, name), type_check);
}


// Returns true if an instance of this map can never find a property with this
// name in its prototype chain. This means all prototypes up to the top are
// fast and don't have the name in them. It would be good if we could optimize
// polymorphic loads where the property is sometimes found in the prototype
// chain.
4627 static bool PrototypeChainCanNeverResolve( 4628 Handle<Map> map, Handle<String> name) { 4629 Isolate* isolate = map->GetIsolate(); 4630 Object* current = map->prototype(); 4631 while (current != isolate->heap()->null_value()) { 4632 if (current->IsJSGlobalProxy() || 4633 current->IsGlobalObject() || 4634 !current->IsJSObject() || 4635 JSObject::cast(current)->map()->has_named_interceptor() || 4636 JSObject::cast(current)->IsAccessCheckNeeded() || 4637 !JSObject::cast(current)->HasFastProperties()) { 4638 return false; 4639 } 4640 4641 LookupResult lookup(isolate); 4642 Map* map = JSObject::cast(current)->map(); 4643 map->LookupDescriptor(NULL, *name, &lookup); 4644 if (lookup.IsFound()) return false; 4645 if (!lookup.IsCacheable()) return false; 4646 current = JSObject::cast(current)->GetPrototype(); 4647 } 4648 return true; 4649 } 4650 4651 4652 void HOptimizedGraphBuilder::HandlePolymorphicLoadNamedField( 4653 Property* expr, 4654 HValue* object, 4655 SmallMapList* types, 4656 Handle<String> name) { 4657 HInstruction* instr = TryLoadPolymorphicAsMonomorphic( 4658 expr, object, types, name); 4659 if (instr != NULL) { 4660 instr->set_position(expr->position()); 4661 return ast_context()->ReturnInstruction(instr, expr->id()); 4662 } 4663 4664 // Something did not match; must use a polymorphic load. 
4665 int count = 0; 4666 HBasicBlock* join = NULL; 4667 for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) { 4668 Handle<Map> map = types->at(i); 4669 LookupResult lookup(isolate()); 4670 if (ComputeLoadStoreField(map, name, &lookup, false) || 4671 (lookup.IsCacheable() && 4672 !map->is_dictionary_map() && 4673 !map->has_named_interceptor() && 4674 (lookup.IsConstant() || 4675 (!lookup.IsFound() && 4676 PrototypeChainCanNeverResolve(map, name))))) { 4677 if (count == 0) { 4678 BuildCheckHeapObject(object); 4679 join = graph()->CreateBasicBlock(); 4680 } 4681 ++count; 4682 HBasicBlock* if_true = graph()->CreateBasicBlock(); 4683 HBasicBlock* if_false = graph()->CreateBasicBlock(); 4684 HCompareMap* compare = 4685 new(zone()) HCompareMap(object, map, if_true, if_false); 4686 current_block()->Finish(compare); 4687 4688 set_current_block(if_true); 4689 4690 // TODO(verwaest): Merge logic with BuildLoadNamedMonomorphic. 4691 if (lookup.IsField()) { 4692 HObjectAccess access = HObjectAccess::ForField(map, &lookup, name); 4693 HLoadNamedField* load = BuildLoadNamedField(object, access, compare); 4694 load->set_position(expr->position()); 4695 AddInstruction(load); 4696 if (!ast_context()->IsEffect()) Push(load); 4697 } else if (lookup.IsConstant()) { 4698 Handle<Object> constant(lookup.GetConstantFromMap(*map), isolate()); 4699 HConstant* hconstant = Add<HConstant>(constant); 4700 if (!ast_context()->IsEffect()) Push(hconstant); 4701 } else { 4702 ASSERT(!lookup.IsFound()); 4703 if (map->prototype()->IsJSObject()) { 4704 Handle<JSObject> prototype(JSObject::cast(map->prototype())); 4705 Handle<JSObject> holder = prototype; 4706 while (holder->map()->prototype()->IsJSObject()) { 4707 holder = handle(JSObject::cast(holder->map()->prototype())); 4708 } 4709 BuildCheckPrototypeMaps(prototype, holder); 4710 } 4711 if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined()); 4712 } 4713 4714 current_block()->Goto(join); 4715 
set_current_block(if_false); 4716 } 4717 } 4718 4719 // Finish up. Unconditionally deoptimize if we've handled all the maps we 4720 // know about and do not want to handle ones we've never seen. Otherwise 4721 // use a generic IC. 4722 if (count == types->length() && FLAG_deoptimize_uncommon_cases) { 4723 FinishExitWithHardDeoptimization("Unknown map in polymorphic load", join); 4724 } else { 4725 HInstruction* load = BuildLoadNamedGeneric(object, name, expr); 4726 load->set_position(expr->position()); 4727 AddInstruction(load); 4728 if (!ast_context()->IsEffect()) Push(load); 4729 4730 if (join != NULL) { 4731 current_block()->Goto(join); 4732 } else { 4733 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE); 4734 if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop()); 4735 return; 4736 } 4737 } 4738 4739 ASSERT(join != NULL); 4740 join->SetJoinId(expr->id()); 4741 set_current_block(join); 4742 if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop()); 4743 } 4744 4745 4746 bool HOptimizedGraphBuilder::TryStorePolymorphicAsMonomorphic( 4747 int position, 4748 BailoutId assignment_id, 4749 HValue* object, 4750 HValue* store_value, 4751 HValue* result_value, 4752 SmallMapList* types, 4753 Handle<String> name) { 4754 // Use monomorphic store if property lookup results in the same field index 4755 // for all maps. Requires special map check on the set of all handled maps. 4756 if (types->length() > kMaxStorePolymorphism) return false; 4757 4758 // TODO(verwaest): Merge the checking logic with the code in 4759 // TryLoadPolymorphicAsMonomorphic. 4760 LookupResult lookup(isolate()); 4761 int count; 4762 Representation representation = Representation::None(); 4763 HObjectAccess access = HObjectAccess::ForMap(); // initial value unused. 4764 for (count = 0; count < types->length(); ++count) { 4765 Handle<Map> map = types->at(count); 4766 // Pass false to ignore transitions. 
4767 if (!ComputeLoadStoreField(map, name, &lookup, false)) break; 4768 ASSERT(!map->is_observed()); 4769 4770 HObjectAccess new_access = HObjectAccess::ForField(map, &lookup, name); 4771 Representation new_representation = new_access.representation(); 4772 4773 if (count == 0) { 4774 // First time through the loop; set access and representation. 4775 access = new_access; 4776 representation = new_representation; 4777 } else if (!representation.IsCompatibleForStore(new_representation)) { 4778 // Representations did not match. 4779 break; 4780 } else if (access.offset() != new_access.offset()) { 4781 // Offsets did not match. 4782 break; 4783 } else if (access.IsInobject() != new_access.IsInobject()) { 4784 // In-objectness did not match. 4785 break; 4786 } 4787 } 4788 4789 if (count != types->length()) return false; 4790 4791 // Everything matched; can use monomorphic store. 4792 BuildCheckHeapObject(object); 4793 Add<HCheckMaps>(object, types); 4794 HInstruction* store; 4795 CHECK_ALIVE_OR_RETURN( 4796 store = BuildStoreNamedField( 4797 object, name, store_value, types->at(count - 1), &lookup), 4798 true); 4799 if (!ast_context()->IsEffect()) Push(result_value); 4800 store->set_position(position); 4801 AddInstruction(store); 4802 Add<HSimulate>(assignment_id); 4803 if (!ast_context()->IsEffect()) Drop(1); 4804 ast_context()->ReturnValue(result_value); 4805 return true; 4806 } 4807 4808 4809 void HOptimizedGraphBuilder::HandlePolymorphicStoreNamedField( 4810 int position, 4811 BailoutId assignment_id, 4812 HValue* object, 4813 HValue* store_value, 4814 HValue* result_value, 4815 SmallMapList* types, 4816 Handle<String> name) { 4817 if (TryStorePolymorphicAsMonomorphic( 4818 position, assignment_id, object, 4819 store_value, result_value, types, name)) { 4820 return; 4821 } 4822 4823 // TODO(ager): We should recognize when the prototype chains for different 4824 // maps are identical. 
  // In that case we can avoid repeatedly generating the
  // same prototype map checks.
  int count = 0;
  HBasicBlock* join = NULL;
  for (int i = 0; i < types->length() && count < kMaxStorePolymorphism; ++i) {
    Handle<Map> map = types->at(i);
    LookupResult lookup(isolate());
    if (ComputeLoadStoreField(map, name, &lookup, true)) {
      if (count == 0) {
        // Lazily created on the first handled map: receiver check and the
        // block where all specialized arms merge.
        BuildCheckHeapObject(object);
        join = graph()->CreateBasicBlock();
      }
      ++count;
      HBasicBlock* if_true = graph()->CreateBasicBlock();
      HBasicBlock* if_false = graph()->CreateBasicBlock();
      HCompareMap* compare =
          new(zone()) HCompareMap(object, map, if_true, if_false);
      current_block()->Finish(compare);

      set_current_block(if_true);
      HInstruction* instr;
      CHECK_ALIVE(instr = BuildStoreNamedField(
          object, name, store_value, map, &lookup));
      instr->set_position(position);
      // Goto will add the HSimulate for the store.
      AddInstruction(instr);
      if (!ast_context()->IsEffect()) Push(result_value);
      current_block()->Goto(join);

      set_current_block(if_false);
    }
  }

  // Finish up.  Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen.  Otherwise
  // use a generic IC.
  if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
    FinishExitWithHardDeoptimization("Unknown map in polymorphic store", join);
  } else {
    HInstruction* instr = BuildStoreNamedGeneric(object, name, store_value);
    instr->set_position(position);
    AddInstruction(instr);

    if (join != NULL) {
      if (!ast_context()->IsEffect()) {
        Push(result_value);
      }
      current_block()->Goto(join);
    } else {
      // The HSimulate for the store should not see the stored value in
      // effect contexts (it is not materialized at expr->id() in the
      // unoptimized code).
      if (instr->HasObservableSideEffects()) {
        if (ast_context()->IsEffect()) {
          Add<HSimulate>(assignment_id, REMOVABLE_SIMULATE);
        } else {
          Push(result_value);
          Add<HSimulate>(assignment_id, REMOVABLE_SIMULATE);
          Drop(1);
        }
      }
      return ast_context()->ReturnValue(result_value);
    }
  }

  ASSERT(join != NULL);
  join->SetJoinId(assignment_id);
  set_current_block(join);
  if (!ast_context()->IsEffect()) {
    ast_context()->ReturnValue(Pop());
  }
}


// Compile a non-compound assignment whose target is a property access,
// dispatching on named (obj.name = v) vs. keyed (obj[key] = v) stores.
void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  CHECK_ALIVE(VisitForValue(prop->obj()));

  if (prop->key()->IsPropertyName()) {
    // Named store.
    CHECK_ALIVE(VisitForValue(expr->value()));
    HValue* value = environment()->ExpressionStackAt(0);
    HValue* object = environment()->ExpressionStackAt(1);

    if (expr->IsUninitialized()) {
      // No type feedback collected yet; deopt softly so feedback can be
      // gathered in the unoptimized code.
      Add<HDeoptimize>("Insufficient type feedback for property assignment",
                       Deoptimizer::SOFT);
    }
    return BuildStoreNamed(expr, expr->id(), expr->position(),
                           expr->AssignmentId(), prop, object, value, value);
  } else {
    // Keyed store.
    CHECK_ALIVE(VisitForValue(prop->key()));
    CHECK_ALIVE(VisitForValue(expr->value()));
    HValue* value = environment()->ExpressionStackAt(0);
    HValue* key = environment()->ExpressionStackAt(1);
    HValue* object = environment()->ExpressionStackAt(2);
    bool has_side_effects = false;
    HandleKeyedElementAccess(object, key, value, expr, expr->AssignmentId(),
                             expr->position(),
                             true,  // is_store
                             &has_side_effects);
    // Drop the receiver, key, and value; the assignment's result is the
    // stored value.
    Drop(3);
    Push(value);
    Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
    return ast_context()->ReturnValue(Pop());
  }
}


// Because not every expression has a position and there is not common
// superclass of Assignment and CountOperation, we cannot just pass the
// owning expression instead of position and ast_id separately.
void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
    Variable* var,
    HValue* value,
    int position,
    BailoutId ast_id) {
  LookupResult lookup(isolate());
  GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, true);
  if (type == kUseCell) {
    // Fast path: store directly into the global's property cell.
    Handle<GlobalObject> global(current_info()->global_object());
    Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
    if (cell->type()->IsConstant()) {
      // The cell is believed to hold a constant; deoptimize if this store
      // would change its value.
      IfBuilder builder(this);
      HValue* constant = Add<HConstant>(cell->type()->AsConstant());
      if (cell->type()->AsConstant()->IsNumber()) {
        builder.If<HCompareNumericAndBranch>(value, constant, Token::EQ);
      } else {
        builder.If<HCompareObjectEqAndBranch>(value, constant);
      }
      builder.Then();
      builder.Else();
      Add<HDeoptimize>("Constant global variable assignment",
                       Deoptimizer::EAGER);
      builder.End();
    }
    HInstruction* instr =
        Add<HStoreGlobalCell>(value, cell, lookup.GetPropertyDetails());
    instr->set_position(position);
    if (instr->HasObservableSideEffects()) {
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    }
  } else {
    // Slow path: generic store through the global object.
    HGlobalObject* global_object = Add<HGlobalObject>();
    HStoreGlobalGeneric* instr =
        Add<HStoreGlobalGeneric>(global_object, var->name(),
                                 value, function_strict_mode_flag());
    instr->set_position(position);
    ASSERT(instr->HasObservableSideEffects());
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}


// Build a named store obj.name = store_value, choosing between an inlined
// setter call, a monomorphic store, a polymorphic dispatch, or a generic IC
// depending on the receiver type feedback.  result_value is what the
// surrounding expression evaluates to (it may differ from store_value, e.g.
// for count operations).
void HOptimizedGraphBuilder::BuildStoreNamed(Expression* expr,
                                             BailoutId id,
                                             int position,
                                             BailoutId assignment_id,
                                             Property* prop,
                                             HValue* object,
                                             HValue* store_value,
                                             HValue* result_value) {
  Literal* key = prop->key()->AsLiteral();
  Handle<String> name = Handle<String>::cast(key->value());
  ASSERT(!name.is_null());

  HInstruction* instr = NULL;
  SmallMapList* types = expr->GetReceiverTypes();
  bool monomorphic = expr->IsMonomorphic();
  Handle<Map> map;
  if (monomorphic) {
    map = types->first();
    // Dictionary-mode receivers cannot use a fast monomorphic store.
    if (map->is_dictionary_map()) monomorphic = false;
  }
  if (monomorphic) {
    Handle<JSFunction> setter;
    Handle<JSObject> holder;
    if (LookupSetter(map, name, &setter, &holder)) {
      AddCheckConstantFunction(holder, object, map);
      // Don't try to inline if the result_value is different from the
      // store_value.  That case isn't handled yet by the inlining.
      if (result_value == store_value &&
          FLAG_inline_accessors &&
          TryInlineSetter(setter, id, assignment_id, store_value)) {
        return;
      }
      // Setter could not be inlined: call it as a known constant function.
      Drop(2);
      Add<HPushArgument>(object);
      Add<HPushArgument>(store_value);
      instr = new(zone()) HCallConstantFunction(setter, 2);
    } else {
      Drop(2);
      CHECK_ALIVE(instr = BuildStoreNamedMonomorphic(object,
                                                     name,
                                                     store_value,
                                                     map));
    }
  } else if (types != NULL && types->length() > 1) {
    // Several possible receiver maps: polymorphic dispatch (which also
    // handles returning the value to the ast context).
    Drop(2);
    return HandlePolymorphicStoreNamedField(
        position, id, object,
        store_value, result_value, types, name);
  } else {
    // No usable type feedback: generic store IC.
    Drop(2);
    instr = BuildStoreNamedGeneric(object, name, store_value);
  }

  // Keep the result live across the simulate in value contexts.
  if (!ast_context()->IsEffect()) Push(result_value);
  instr->set_position(position);
  AddInstruction(instr);
  if (instr->HasObservableSideEffects()) {
    Add<HSimulate>(id, REMOVABLE_SIMULATE);
  }
  if (!ast_context()->IsEffect()) Drop(1);
  return ast_context()->ReturnValue(result_value);
}


// Compile a compound assignment (a op= b, for variables and properties) by
// loading the target, applying the binary operation, and storing back.
void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
  Expression* target = expr->target();
  VariableProxy* proxy = target->AsVariableProxy();
  Property* prop = target->AsProperty();
  ASSERT(proxy == NULL || prop == NULL);

  // We have a second position recorded in the FullCodeGenerator to have
  // type feedback for the binary operation.
  BinaryOperation* operation = expr->binary_operation();

  if (proxy != NULL) {
    // Target is a variable.
    Variable* var = proxy->var();
    if (var->mode() == LET) {
      return Bailout(kUnsupportedLetCompoundAssignment);
    }

    // The binary operation (load + op) leaves its result on the stack.
    CHECK_ALIVE(VisitForValue(operation));

    switch (var->location()) {
      case Variable::UNALLOCATED:
        HandleGlobalVariableAssignment(var,
                                       Top(),
                                       expr->position(),
                                       expr->AssignmentId());
        break;

      case Variable::PARAMETER:
      case Variable::LOCAL:
        if (var->mode() == CONST)  {
          return Bailout(kUnsupportedConstCompoundAssignment);
        }
        BindIfLive(var, Top());
        break;

      case Variable::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function
        // using the arguments object.  We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will be allocated to context slots.  We have no
          // direct way to detect that the variable is a parameter so we do
          // a linear search of the parameter variables.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
            }
          }
        }

        HStoreContextSlot::Mode mode;

        switch (var->mode()) {
          case LET:
            mode = HStoreContextSlot::kCheckDeoptimize;
            break;
          case CONST:
            return ast_context()->ReturnValue(Pop());
          case CONST_HARMONY:
            // This case is checked statically so no need to
            // perform checks here
            UNREACHABLE();
          default:
            mode = HStoreContextSlot::kNoCheck;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        break;
      }

      case Variable::LOOKUP:
        return Bailout(kCompoundAssignmentToLookupSlot);
    }
    return ast_context()->ReturnValue(Pop());

  } else if (prop != NULL) {
    if (prop->key()->IsPropertyName()) {
      // Named property.
      CHECK_ALIVE(VisitForValue(prop->obj()));
      HValue* object = Top();

      Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
      Handle<Map> map;
      HInstruction* load = NULL;
      SmallMapList* types = prop->GetReceiverTypes();
      bool monomorphic = prop->IsMonomorphic();
      if (monomorphic) {
        map = types->first();
        // We can't generate code for a monomorphic dict mode load so
        // just pretend it is not monomorphic.
        if (map->is_dictionary_map()) monomorphic = false;
      }
      if (monomorphic) {
        Handle<JSFunction> getter;
        Handle<JSObject> holder;
        if (LookupGetter(map, name, &getter, &holder)) {
          load = BuildCallGetter(object, map, getter, holder);
        } else {
          load = BuildLoadNamedMonomorphic(object, name, prop, map);
        }
      } else if (types != NULL && types->length() > 1) {
        load = TryLoadPolymorphicAsMonomorphic(prop, object, types, name);
      }
      // Fall back to a generic load if no specialized load was built.
      if (load == NULL) load = BuildLoadNamedGeneric(object, name, prop);
      PushAndAdd(load);
      if (load->HasObservableSideEffects()) {
        Add<HSimulate>(prop->LoadId(), REMOVABLE_SIMULATE);
      }

      CHECK_ALIVE(VisitForValue(expr->value()));
      HValue* right = Pop();
      HValue* left = Pop();

      HInstruction* instr = BuildBinaryOperation(operation, left, right);
      PushAndAdd(instr);
      if (instr->HasObservableSideEffects()) {
        Add<HSimulate>(operation->id(), REMOVABLE_SIMULATE);
      }

      // Store the combined value back; it is both the stored and the
      // resulting value of the compound assignment.
      return BuildStoreNamed(expr, expr->id(), expr->position(),
                             expr->AssignmentId(), prop, object, instr, instr);
    } else {
      // Keyed property.
      CHECK_ALIVE(VisitForValue(prop->obj()));
      CHECK_ALIVE(VisitForValue(prop->key()));
      HValue* obj = environment()->ExpressionStackAt(1);
      HValue* key = environment()->ExpressionStackAt(0);

      bool has_side_effects = false;
      HValue* load = HandleKeyedElementAccess(
          obj, key, NULL, prop, prop->LoadId(), RelocInfo::kNoPosition,
          false,  // is_store
          &has_side_effects);
      Push(load);
      if (has_side_effects) Add<HSimulate>(prop->LoadId(), REMOVABLE_SIMULATE);

      CHECK_ALIVE(VisitForValue(expr->value()));
      HValue* right = Pop();
      HValue* left = Pop();

      HInstruction* instr = BuildBinaryOperation(operation, left, right);
      PushAndAdd(instr);
      if (instr->HasObservableSideEffects()) {
        Add<HSimulate>(operation->id(), REMOVABLE_SIMULATE);
      }

      HandleKeyedElementAccess(obj, key, instr, expr, expr->AssignmentId(),
                               RelocInfo::kNoPosition,
                               true,  // is_store
                               &has_side_effects);

      // Drop the simulated receiver, key, and value.  Return the value.
      Drop(3);
      Push(instr);
      ASSERT(has_side_effects);  // Stores always have side effects.
      Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
      return ast_context()->ReturnValue(Pop());
    }

  } else {
    return Bailout(kInvalidLhsInCompoundAssignment);
  }
}


// AST visitor for assignments: dispatches to the compound, property, or
// variable assignment helpers and handles const/let initialization rules.
void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  ASSERT(proxy == NULL || prop == NULL);

  if (expr->is_compound()) {
    HandleCompoundAssignment(expr);
    return;
  }

  if (prop != NULL) {
    HandlePropertyAssignment(expr);
  } else if (proxy != NULL) {
    Variable* var = proxy->var();

    if (var->mode() == CONST) {
      if (expr->op() != Token::INIT_CONST) {
        // Non-initializing stores to legacy const are silently ignored; the
        // expression still evaluates to the right-hand side.
        CHECK_ALIVE(VisitForValue(expr->value()));
        return ast_context()->ReturnValue(Pop());
      }

      if (var->IsStackAllocated()) {
        // We insert a use of the old value to detect unsupported uses of const
        // variables (e.g. initialization inside a loop).
        HValue* old_value = environment()->Lookup(var);
        Add<HUseConst>(old_value);
      }
    } else if (var->mode() == CONST_HARMONY) {
      if (expr->op() != Token::INIT_CONST_HARMONY) {
        return Bailout(kNonInitializerAssignmentToConst);
      }
    }

    if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);

    // Handle the assignment.
    switch (var->location()) {
      case Variable::UNALLOCATED:
        CHECK_ALIVE(VisitForValue(expr->value()));
        HandleGlobalVariableAssignment(var,
                                       Top(),
                                       expr->position(),
                                       expr->AssignmentId());
        return ast_context()->ReturnValue(Pop());

      case Variable::PARAMETER:
      case Variable::LOCAL: {
        // Perform an initialization check for let declared variables
        // or parameters.
        if (var->mode() == LET && expr->op() == Token::ASSIGN) {
          HValue* env_value = environment()->Lookup(var);
          if (env_value == graph()->GetConstantHole()) {
            return Bailout(kAssignmentToLetVariableBeforeInitialization);
          }
        }
        // We do not allow the arguments object to occur in a context where it
        // may escape, but assignments to stack-allocated locals are
        // permitted.
        CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
        HValue* value = Pop();
        BindIfLive(var, value);
        return ast_context()->ReturnValue(value);
      }

      case Variable::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function using
        // the arguments object.  We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots.  We have no direct way
          // to detect that the variable is a parameter.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        CHECK_ALIVE(VisitForValue(expr->value()));
        HStoreContextSlot::Mode mode;
        if (expr->op() == Token::ASSIGN) {
          switch (var->mode()) {
            case LET:
              mode = HStoreContextSlot::kCheckDeoptimize;
              break;
            case CONST:
              return ast_context()->ReturnValue(Pop());
            case CONST_HARMONY:
              // This case is checked statically so no need to
              // perform checks here
              UNREACHABLE();
            default:
              mode = HStoreContextSlot::kNoCheck;
          }
        } else if (expr->op() == Token::INIT_VAR ||
                   expr->op() == Token::INIT_LET ||
                   expr->op() == Token::INIT_CONST_HARMONY) {
          mode = HStoreContextSlot::kNoCheck;
        } else {
          ASSERT(expr->op() == Token::INIT_CONST);

          mode = HStoreContextSlot::kCheckIgnoreAssignment;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        return ast_context()->ReturnValue(Pop());
      }

      case Variable::LOOKUP:
        return Bailout(kAssignmentToLOOKUPVariable);
    }
  } else {
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
}


void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
  // Generators are not optimized, so we should never get here.
  UNREACHABLE();
}


void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // We don't optimize functions with invalid left-hand sides in
  // assignments, count operations, or for-in.  Consequently throw can
  // currently only occur in an effect context.
  ASSERT(ast_context()->IsEffect());
  CHECK_ALIVE(VisitForValue(expr->exception()));

  HValue* value = environment()->Pop();
  HThrow* instr = Add<HThrow>(value);
  instr->set_position(expr->position());
  Add<HSimulate>(expr->id());
}


// Build a load of an in-object/backing-store field.  Double fields boxed in
// a HeapNumber (when double-field tracking is on) are loaded in two steps:
// first the HeapNumber, then its value.
HLoadNamedField* HGraphBuilder::BuildLoadNamedField(HValue* object,
                                                    HObjectAccess access,
                                                    HValue* typecheck) {
  if (FLAG_track_double_fields && access.representation().IsDouble()) {
    // load the heap number
    HLoadNamedField* heap_number = Add<HLoadNamedField>(
        object, access.WithRepresentation(Representation::Tagged()));
    heap_number->set_type(HType::HeapNumber());
    // load the double value from it
    return New<HLoadNamedField>(heap_number,
                                HObjectAccess::ForHeapNumberValue(),
                                typecheck);
  }
  return New<HLoadNamedField>(object, access, typecheck);
}


// Build a load of a string's length, folding to a constant when the string
// itself is a known constant.
HInstruction* HGraphBuilder::BuildLoadStringLength(HValue* object,
                                                   HValue* typecheck) {
  if (FLAG_fold_constants && object->IsConstant()) {
    HConstant* constant = HConstant::cast(object);
    if (constant->HasStringValue()) {
      return New<HConstant>(constant->StringValue()->length());
    }
  }
  return BuildLoadNamedField(
      object, HObjectAccess::ForStringLength(), typecheck);
}


// Build a generic (IC-based) named load; emits a soft deopt first when no
// type feedback has been collected for the property.
HInstruction* HOptimizedGraphBuilder::BuildLoadNamedGeneric(
    HValue* object,
    Handle<String> name,
    Property* expr) {
  if (expr->IsUninitialized()) {
    Add<HDeoptimize>("Insufficient feedback for generic named load",
                     Deoptimizer::SOFT);
  }
  HValue* context = environment()->context();
  return new(zone()) HLoadNamedGeneric(context, object, name);
}


// Build a call to a known accessor getter: check it is still the expected
// constant function, push the receiver, and call it.
HInstruction* HOptimizedGraphBuilder::BuildCallGetter(
    HValue* object,
    Handle<Map> map,
    Handle<JSFunction> getter,
    Handle<JSObject> holder) {
  AddCheckConstantFunction(holder, object, map);
  Add<HPushArgument>(object);
  return new(zone()) HCallConstantFunction(getter, 1);
}


// Build a named load for a receiver with a single known (non-dictionary)
// map.  Tries, in order: array length, own field, own constant, field on
// the prototype chain, constant on the prototype chain; otherwise falls
// back to a generic load.
HInstruction* HOptimizedGraphBuilder::BuildLoadNamedMonomorphic(
    HValue* object,
    Handle<String> name,
    Property* expr,
    Handle<Map> map) {
  // Handle a load from a known field.
  ASSERT(!map->is_dictionary_map());

  // Handle access to various length properties
  if (name->Equals(isolate()->heap()->length_string())) {
    if (map->instance_type() == JS_ARRAY_TYPE) {
      HCheckMaps* type_check = AddCheckMap(object, map);
      return New<HLoadNamedField>(object,
          HObjectAccess::ForArrayLength(map->elements_kind()), type_check);
    }
  }

  LookupResult lookup(isolate());
  map->LookupDescriptor(NULL, *name, &lookup);
  if (lookup.IsField()) {
    HCheckMaps* type_check = AddCheckMap(object, map);
    return BuildLoadNamedField(object,
        HObjectAccess::ForField(map, &lookup, name), type_check);
  }

  // Handle a load of a constant known function.
  if (lookup.IsConstant()) {
    AddCheckMap(object, map);
    Handle<Object> constant(lookup.GetConstantFromMap(*map), isolate());
    return New<HConstant>(constant);
  }

  // Handle a load from a known field somewhere in the prototype chain.
  LookupInPrototypes(map, name, &lookup);
  if (lookup.IsField()) {
    Handle<JSObject> prototype(JSObject::cast(map->prototype()));
    Handle<JSObject> holder(lookup.holder());
    Handle<Map> holder_map(holder->map());
    HCheckMaps* type_check = AddCheckMap(object, map);
    // Guard the whole chain up to the holder so the field cannot move.
    BuildCheckPrototypeMaps(prototype, holder);
    HValue* holder_value = Add<HConstant>(holder);
    return BuildLoadNamedField(holder_value,
        HObjectAccess::ForField(holder_map, &lookup, name), type_check);
  }

  // Handle a load of a constant function somewhere in the prototype chain.
  if (lookup.IsConstant()) {
    Handle<JSObject> prototype(JSObject::cast(map->prototype()));
    Handle<JSObject> holder(lookup.holder());
    Handle<Map> holder_map(holder->map());
    AddCheckMap(object, map);
    BuildCheckPrototypeMaps(prototype, holder);
    Handle<Object> constant(lookup.GetConstantFromMap(*holder_map), isolate());
    return New<HConstant>(constant);
  }

  // No luck, do a generic load.
  return BuildLoadNamedGeneric(object, name, expr);
}


// Build a generic (IC-based) keyed load.
HInstruction* HOptimizedGraphBuilder::BuildLoadKeyedGeneric(HValue* object,
                                                            HValue* key) {
  HValue* context = environment()->context();
  return new(zone()) HLoadKeyedGeneric(context, object, key);
}


// Build an element access (load or store) for a receiver with one known
// map: a map check followed by the unchecked monomorphic access.
HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
    HValue* object,
    HValue* key,
    HValue* val,
    HValue* dependency,
    Handle<Map> map,
    bool is_store,
    KeyedAccessStoreMode store_mode) {
  HCheckMaps* mapcheck = Add<HCheckMaps>(object, map, top_info(), dependency);
  if (dependency) {
    // The dependency already guarantees the elements kind; drop the GVN
    // dependence so the check can be hoisted/merged.
    mapcheck->ClearGVNFlag(kDependsOnElementsKind);
  }

  // Loads from a "stock" fast holey double arrays can elide the hole check.
  LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
  if (*map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS) &&
      isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
    // As long as Array.prototype/Object.prototype have no elements, reading
    // the hole is equivalent to reading undefined, so the hole check can be
    // skipped; register the dependency so we deopt if that changes.
    Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
    Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
    BuildCheckPrototypeMaps(prototype, object_prototype);
    load_mode = ALLOW_RETURN_HOLE;
    graph()->MarkDependsOnEmptyArrayProtoElements();
  }

  return BuildUncheckedMonomorphicElementAccess(
      object, key, val,
      mapcheck, map->instance_type() == JS_ARRAY_TYPE,
      map->elements_kind(), is_store, load_mode, store_mode);
}


// Try to build ONE element load that safely serves all receiver maps, by
// picking the most general compatible elements kind.  Returns NULL when the
// maps are too heterogeneous (JSArray vs. non-array, or double vs.
// smi/object elements).
HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
    HValue* object,
    HValue* key,
    HValue* val,
    SmallMapList* maps) {
  // For polymorphic loads of similar elements kinds (i.e. all tagged or all
  // double), always use the "worst case" code without a transition.  This is
  // much faster than transitioning the elements to the worst case, trading a
  // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
  bool has_double_maps = false;
  bool has_smi_or_object_maps = false;
  bool has_js_array_access = false;
  bool has_non_js_array_access = false;
  bool has_seen_holey_elements = false;
  Handle<Map> most_general_consolidated_map;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    // Don't allow mixing of JSArrays with JSObjects.
    if (map->instance_type() == JS_ARRAY_TYPE) {
      if (has_non_js_array_access) return NULL;
      has_js_array_access = true;
    } else if (has_js_array_access) {
      return NULL;
    } else {
      has_non_js_array_access = true;
    }
    // Don't allow mixed, incompatible elements kinds.
    if (map->has_fast_double_elements()) {
      if (has_smi_or_object_maps) return NULL;
      has_double_maps = true;
    } else if (map->has_fast_smi_or_object_elements()) {
      if (has_double_maps) return NULL;
      has_smi_or_object_maps = true;
    } else {
      return NULL;
    }
    // Remember if we've ever seen holey elements.
    if (IsHoleyElementsKind(map->elements_kind())) {
      has_seen_holey_elements = true;
    }
    // Remember the most general elements kind, the code for its load will
    // properly handle all of the more specific cases.
    if ((i == 0) || IsMoreGeneralElementsKindTransition(
            most_general_consolidated_map->elements_kind(),
            map->elements_kind())) {
      most_general_consolidated_map = map;
    }
  }
  if (!has_double_maps && !has_smi_or_object_maps) return NULL;

  HCheckMaps* check_maps = Add<HCheckMaps>(object, maps);
  // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
  // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
  ElementsKind consolidated_elements_kind = has_seen_holey_elements
      ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
      : most_general_consolidated_map->elements_kind();
  HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
      object, key, val, check_maps,
      most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
      consolidated_elements_kind,
      false, NEVER_RETURN_HOLE, STANDARD_STORE);
  return instr;
}


// Build an element access for a receiver with multiple possible maps: try a
// consolidated load first, apply elements-kind transitions where possible,
// then either handle the single remaining map monomorphically or emit a
// per-map dispatch chain.  Sets *has_side_effects for the caller, which is
// responsible for adding the matching HSimulate.
HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
    HValue* object,
    HValue* key,
    HValue* val,
    Expression* prop,
    BailoutId ast_id,
    int position,
    bool is_store,
    KeyedAccessStoreMode store_mode,
    bool* has_side_effects) {
  *has_side_effects = false;
  BuildCheckHeapObject(object);
  SmallMapList* maps = prop->GetReceiverTypes();

  if (!is_store) {
    HInstruction* consolidated_load =
        TryBuildConsolidatedElementLoad(object, key, val, maps);
    if (consolidated_load != NULL) {
      *has_side_effects |= consolidated_load->HasObservableSideEffects();
      if (position != RelocInfo::kNoPosition) {
        consolidated_load->set_position(position);
      }
      return consolidated_load;
    }
  }

  // Elements_kind transition support.
  MapHandleList transition_target(maps->length());
  // Collect possible transition targets.
  MapHandleList possible_transitioned_maps(maps->length());
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    ElementsKind elements_kind = map->elements_kind();
    if (IsFastElementsKind(elements_kind) &&
        elements_kind != GetInitialFastElementsKind()) {
      possible_transitioned_maps.Add(map);
    }
  }
  // Get transition target for each map (NULL == no transition).
5620 for (int i = 0; i < maps->length(); ++i) { 5621 Handle<Map> map = maps->at(i); 5622 Handle<Map> transitioned_map = 5623 map->FindTransitionedMap(&possible_transitioned_maps); 5624 transition_target.Add(transitioned_map); 5625 } 5626 5627 MapHandleList untransitionable_maps(maps->length()); 5628 HTransitionElementsKind* transition = NULL; 5629 for (int i = 0; i < maps->length(); ++i) { 5630 Handle<Map> map = maps->at(i); 5631 ASSERT(map->IsMap()); 5632 if (!transition_target.at(i).is_null()) { 5633 ASSERT(Map::IsValidElementsTransition( 5634 map->elements_kind(), 5635 transition_target.at(i)->elements_kind())); 5636 transition = Add<HTransitionElementsKind>(object, map, 5637 transition_target.at(i)); 5638 } else { 5639 untransitionable_maps.Add(map); 5640 } 5641 } 5642 5643 // If only one map is left after transitioning, handle this case 5644 // monomorphically. 5645 ASSERT(untransitionable_maps.length() >= 1); 5646 if (untransitionable_maps.length() == 1) { 5647 Handle<Map> untransitionable_map = untransitionable_maps[0]; 5648 HInstruction* instr = NULL; 5649 if (untransitionable_map->has_slow_elements_kind()) { 5650 instr = AddInstruction(is_store ? BuildStoreKeyedGeneric(object, key, val) 5651 : BuildLoadKeyedGeneric(object, key)); 5652 } else { 5653 instr = BuildMonomorphicElementAccess( 5654 object, key, val, transition, untransitionable_map, is_store, 5655 store_mode); 5656 } 5657 *has_side_effects |= instr->HasObservableSideEffects(); 5658 if (position != RelocInfo::kNoPosition) instr->set_position(position); 5659 return is_store ? 
NULL : instr; 5660 } 5661 5662 HInstruction* checkspec = 5663 AddInstruction(HCheckInstanceType::NewIsSpecObject(object, zone())); 5664 HBasicBlock* join = graph()->CreateBasicBlock(); 5665 5666 HInstruction* elements = AddLoadElements(object, checkspec); 5667 5668 for (int i = 0; i < untransitionable_maps.length(); ++i) { 5669 Handle<Map> map = untransitionable_maps[i]; 5670 ElementsKind elements_kind = map->elements_kind(); 5671 HBasicBlock* this_map = graph()->CreateBasicBlock(); 5672 HBasicBlock* other_map = graph()->CreateBasicBlock(); 5673 HCompareMap* mapcompare = 5674 new(zone()) HCompareMap(object, map, this_map, other_map); 5675 current_block()->Finish(mapcompare); 5676 5677 set_current_block(this_map); 5678 HInstruction* checked_key = NULL; 5679 HInstruction* access = NULL; 5680 if (IsFastElementsKind(elements_kind)) { 5681 if (is_store && !IsFastDoubleElementsKind(elements_kind)) { 5682 Add<HCheckMaps>( 5683 elements, isolate()->factory()->fixed_array_map(), 5684 top_info(), mapcompare); 5685 } 5686 if (map->instance_type() == JS_ARRAY_TYPE) { 5687 HInstruction* length = Add<HLoadNamedField>( 5688 object, HObjectAccess::ForArrayLength(elements_kind), mapcompare); 5689 checked_key = Add<HBoundsCheck>(key, length); 5690 } else { 5691 HInstruction* length = AddLoadFixedArrayLength(elements); 5692 checked_key = Add<HBoundsCheck>(key, length); 5693 } 5694 access = AddFastElementAccess( 5695 elements, checked_key, val, mapcompare, 5696 elements_kind, is_store, NEVER_RETURN_HOLE, STANDARD_STORE); 5697 } else if (IsDictionaryElementsKind(elements_kind)) { 5698 if (is_store) { 5699 access = AddInstruction(BuildStoreKeyedGeneric(object, key, val)); 5700 } else { 5701 access = AddInstruction(BuildLoadKeyedGeneric(object, key)); 5702 } 5703 } else { 5704 ASSERT(IsExternalArrayElementsKind(elements_kind)); 5705 HInstruction* length = AddLoadFixedArrayLength(elements); 5706 checked_key = Add<HBoundsCheck>(key, length); 5707 HLoadExternalArrayPointer* 
external_elements = 5708 Add<HLoadExternalArrayPointer>(elements); 5709 access = AddExternalArrayElementAccess( 5710 external_elements, checked_key, val, 5711 mapcompare, elements_kind, is_store); 5712 } 5713 *has_side_effects |= access->HasObservableSideEffects(); 5714 // The caller will use has_side_effects and add a correct Simulate. 5715 access->SetFlag(HValue::kHasNoObservableSideEffects); 5716 if (position != RelocInfo::kNoPosition) access->set_position(position); 5717 if (!is_store) { 5718 Push(access); 5719 } 5720 NoObservableSideEffectsScope scope(this); 5721 current_block()->GotoNoSimulate(join); 5722 set_current_block(other_map); 5723 } 5724 5725 // Deopt if none of the cases matched. 5726 NoObservableSideEffectsScope scope(this); 5727 FinishExitWithHardDeoptimization("Unknown type in polymorphic element access", 5728 join); 5729 set_current_block(join); 5730 return is_store ? NULL : Pop(); 5731 } 5732 5733 5734 HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess( 5735 HValue* obj, 5736 HValue* key, 5737 HValue* val, 5738 Expression* expr, 5739 BailoutId ast_id, 5740 int position, 5741 bool is_store, 5742 bool* has_side_effects) { 5743 ASSERT(!expr->IsPropertyName()); 5744 HInstruction* instr = NULL; 5745 if (expr->IsMonomorphic()) { 5746 Handle<Map> map = expr->GetMonomorphicReceiverType(); 5747 if (map->has_slow_elements_kind()) { 5748 instr = is_store ? 
          BuildStoreKeyedGeneric(obj, key, val)
          : BuildLoadKeyedGeneric(obj, key);
      AddInstruction(instr);
    } else {
      // Monomorphic fast-elements receiver: emit a specialized access.
      BuildCheckHeapObject(obj);
      instr = BuildMonomorphicElementAccess(
          obj, key, val, NULL, map, is_store, expr->GetStoreMode());
    }
  } else if (expr->GetReceiverTypes() != NULL &&
             !expr->GetReceiverTypes()->is_empty()) {
    // Several receiver maps recorded: dispatch per map.
    return HandlePolymorphicElementAccess(
        obj, key, val, expr, ast_id, position, is_store,
        expr->GetStoreMode(), has_side_effects);
  } else {
    // No usable type feedback: fall back to the generic keyed IC, and emit
    // a soft deopt so the function is re-optimized once feedback exists.
    if (is_store) {
      if (expr->IsAssignment() && expr->AsAssignment()->IsUninitialized()) {
        Add<HDeoptimize>("Insufficient feedback for keyed store",
                         Deoptimizer::SOFT);
      }
      instr = BuildStoreKeyedGeneric(obj, key, val);
    } else {
      if (expr->AsProperty()->IsUninitialized()) {
        Add<HDeoptimize>("Insufficient feedback for keyed load",
                         Deoptimizer::SOFT);
      }
      instr = BuildLoadKeyedGeneric(obj, key);
    }
    AddInstruction(instr);
  }
  if (position != RelocInfo::kNoPosition) instr->set_position(position);
  *has_side_effects = instr->HasObservableSideEffects();
  return instr;
}


// Creates (but does not insert) a generic keyed store instruction in the
// current context, carrying the enclosing function's strict-mode flag.
// Callers are responsible for adding it to the graph.
HInstruction* HOptimizedGraphBuilder::BuildStoreKeyedGeneric(
    HValue* object,
    HValue* key,
    HValue* value) {
  HValue* context = environment()->context();
  return new(zone()) HStoreKeyedGeneric(
                         context,
                         object,
                         key,
                         value,
                         function_strict_mode_flag());
}


// Materializes the arguments of the current inlined function as explicit
// HPushArgument instructions placed directly after the function's
// HEnterInlined entry, followed by an HArgumentsElements that reads them
// back. Idempotent: guarded by function_state()->arguments_pushed().
void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
  // Outermost function already has arguments on the stack.
  if (function_state()->outer() == NULL) return;

  if (function_state()->arguments_pushed()) return;

  // Push arguments when entering inlined function.
  HEnterInlined* entry = function_state()->entry();
  entry->set_arguments_pushed();

  HArgumentsObject* arguments = entry->arguments_object();
  const ZoneList<HValue*>* arguments_values = arguments->arguments_values();

  // Insert the pushes in argument order, each one after the previous, so
  // they form a contiguous chain starting at the entry instruction.
  HInstruction* insert_after = entry;
  for (int i = 0; i < arguments_values->length(); i++) {
    HValue* argument = arguments_values->at(i);
    HInstruction* push_argument = New<HPushArgument>(argument);
    push_argument->InsertAfter(insert_after);
    insert_after = push_argument;
  }

  // GVN must not merge this with other HArgumentsElements instructions:
  // it refers specifically to the pushes emitted above.
  HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
  arguments_elements->ClearFlag(HValue::kUseGVN);
  arguments_elements->InsertAfter(insert_after);
  function_state()->set_arguments_elements(arguments_elements);
}


// Tries to compile a property access on a stack-allocated arguments object
// ('arguments.length' or 'arguments[i]') without materializing the object.
// Returns false if expr is not such an access; returns true once the access
// has been compiled (or a bailout was signalled while visiting the key).
bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
  VariableProxy* proxy = expr->obj()->AsVariableProxy();
  if (proxy == NULL) return false;
  if (!proxy->var()->IsStackAllocated()) return false;
  if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
    return false;
  }

  HInstruction* result = NULL;
  if (expr->key()->IsPropertyName()) {
    // Only 'arguments.length' is supported among named accesses.
    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
    if (!name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("length"))) return false;

    if (function_state()->outer() == NULL) {
      HInstruction* elements = Add<HArgumentsElements>(false);
      result = New<HArgumentsLength>(elements);
    } else {
      // Inlined frame: the argument count is a compile-time constant.
      // Number of arguments without receiver.
      int argument_count = environment()->
          arguments_environment()->parameter_count() - 1;
      result = New<HConstant>(argument_count);
    }
  } else {
    Push(graph()->GetArgumentsObject());
    VisitForValue(expr->key());
    if (HasStackOverflow() || current_block() == NULL) return true;
    HValue* key = Pop();
    Drop(1);  // Arguments object.
    if (function_state()->outer() == NULL) {
      // Outermost frame: read the element from the actual arguments adaptor
      // frame, with a bounds check against the dynamic length.
      HInstruction* elements = Add<HArgumentsElements>(false);
      HInstruction* length = Add<HArgumentsLength>(elements);
      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
      result = new(zone()) HAccessArgumentsAt(elements, length, checked_key);
    } else {
      // Inlined frame: arguments were pushed explicitly; the length is a
      // compile-time constant.
      EnsureArgumentsArePushedForAccess();

      // Number of arguments without receiver.
      HInstruction* elements = function_state()->arguments_elements();
      int argument_count = environment()->
          arguments_environment()->parameter_count() - 1;
      HInstruction* length = Add<HConstant>(argument_count);
      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
      result = new(zone()) HAccessArgumentsAt(elements, length, checked_key);
    }
  }
  ast_context()->ReturnInstruction(result, expr->id());
  return true;
}


// Compiles a property load (obj.name or obj[key]). Special-cases arguments
// access, string length, string character access, function prototypes, and
// monomorphic named loads (including inlined getters); dispatches
// polymorphic named loads and keyed accesses to their dedicated handlers.
void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());

  if (TryArgumentsAccess(expr)) return;

  CHECK_ALIVE(VisitForValue(expr->obj()));

  HInstruction* instr = NULL;
  if (expr->IsStringLength()) {
    HValue* string = Pop();
    BuildCheckHeapObject(string);
    HInstruction* checkstring =
        AddInstruction(HCheckInstanceType::NewIsString(string, zone()));
    instr = BuildLoadStringLength(string, checkstring);
  } else if (expr->IsStringAccess()) {
    CHECK_ALIVE(VisitForValue(expr->key()));
    HValue* index = Pop();
    HValue* string = Pop();
    HValue* context = environment()->context();
    HInstruction* char_code =
        BuildStringCharCodeAt(string, index);
    AddInstruction(char_code);
    instr = HStringCharFromCode::New(zone(), context, char_code);

  } else if (expr->IsFunctionPrototype()) {
    HValue* function = Pop();
    BuildCheckHeapObject(function);
    instr = new(zone())
        HLoadFunctionPrototype(function);

  } else if (expr->key()->IsPropertyName()) {
    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
    SmallMapList* types = expr->GetReceiverTypes();
    HValue* object = Top();

    // A receiver is monomorphic either via recorded type feedback or via a
    // map inferred from the HValue itself; dictionary maps are excluded
    // because their properties can move without a map change.
    Handle<Map> map;
    bool monomorphic = false;
    if (expr->IsMonomorphic()) {
      map = types->first();
      monomorphic = !map->is_dictionary_map();
    } else if (object->HasMonomorphicJSObjectType()) {
      map = object->GetMonomorphicJSObjectMap();
      monomorphic = !map->is_dictionary_map();
    }
    if (monomorphic) {
      Handle<JSFunction> getter;
      Handle<JSObject> holder;
      if (LookupGetter(map, name, &getter, &holder)) {
        // Accessor property: inline the getter if allowed, otherwise call
        // it as a known constant function.
        AddCheckConstantFunction(holder, Top(), map);
        if (FLAG_inline_accessors && TryInlineGetter(getter, expr)) return;
        Add<HPushArgument>(Pop());
        instr = new(zone()) HCallConstantFunction(getter, 1);
      } else {
        instr = BuildLoadNamedMonomorphic(Pop(), name, expr, map);
      }
    } else if (types != NULL && types->length() > 1) {
      return HandlePolymorphicLoadNamedField(expr, Pop(), types, name);
    } else {
      instr = BuildLoadNamedGeneric(Pop(), name, expr);
    }

  } else {
    CHECK_ALIVE(VisitForValue(expr->key()));

    HValue* key = Pop();
    HValue* obj = Pop();

    bool has_side_effects = false;
    HValue* load = HandleKeyedElementAccess(
        obj, key, NULL, expr, expr->id(), expr->position(),
        false,  // is_store
        &has_side_effects);
    if (has_side_effects) {
      // A simulate is required after an access with observable side
      // effects; keep the loaded value live across it unless the result
      // is unused.
      if (ast_context()->IsEffect()) {
        Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
      } else {
        Push(load);
        Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
        Drop(1);
      }
    }
    return ast_context()->ReturnValue(load);
  }
  instr->set_position(expr->position());
  return ast_context()->ReturnInstruction(instr, expr->id());
}


// Emits a map check for a known constant object, or registers a code
// dependency instead when the check can be omitted.
void HGraphBuilder::BuildConstantMapCheck(Handle<JSObject>
                                          constant,
                                          CompilationInfo* info) {
  HConstant* constant_value = New<HConstant>(constant);

  // If the map's stability lets us skip the runtime check, record a
  // compilation dependency instead so the code deopts if the map changes.
  if (constant->map()->CanOmitMapChecks()) {
    constant->map()->AddDependentCompilationInfo(
        DependentCode::kPrototypeCheckGroup, info);
    return;
  }

  AddInstruction(constant_value);
  HCheckMaps* check =
      Add<HCheckMaps>(constant_value, handle(constant->map()), info);
  // The prototype check does not care about elements-kind transitions.
  check->ClearGVNFlag(kDependsOnElementsKind);
}


// Emits map checks (or dependencies) for every object on the prototype
// chain from prototype up to and including holder.
void HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
                                            Handle<JSObject> holder) {
  BuildConstantMapCheck(prototype, top_info());
  while (!prototype.is_identical_to(holder)) {
    prototype = handle(JSObject::cast(prototype->GetPrototype()));
    BuildConstantMapCheck(prototype, top_info());
  }
}


// Like BuildCheckPrototypeMaps, starting from the receiver map's prototype;
// a null holder means the property lives on the receiver itself and no
// prototype checks are needed.
void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
                                                   Handle<Map> receiver_map) {
  if (!holder.is_null()) {
    Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
    BuildCheckPrototypeMaps(prototype, holder);
  }
}


// Guards a call to a known constant function by checking the receiver's map
// and the maps along the prototype chain up to the holder.
void HOptimizedGraphBuilder::AddCheckConstantFunction(
    Handle<JSObject> holder,
    HValue* receiver,
    Handle<Map> receiver_map) {
  // Constant functions have the nice property that the map will change if they
  // are overwritten. Therefore it is enough to check the map of the holder and
  // its prototypes.
  AddCheckMap(receiver, receiver_map);
  AddCheckPrototypeMaps(holder, receiver_map);
}


// Sort key for polymorphic call targets: remembers the original index in
// the type list plus the metrics used by operator< below.
class FunctionSorter {
 public:
  FunctionSorter() : index_(0), ticks_(0), ast_length_(0), src_length_(0) { }
  FunctionSorter(int index, int ticks, int ast_length, int src_length)
      : index_(index),
        ticks_(ticks),
        ast_length_(ast_length),
        src_length_(src_length) { }

  int index() const { return index_; }
  int ticks() const { return ticks_; }
  int ast_length() const { return ast_length_; }
  int src_length() const { return src_length_; }

 private:
  int index_;       // Position in the original receiver-type list.
  int ticks_;       // Profiler ticks of the target function.
  int ast_length_;  // AST node count (inlining cost estimate).
  int src_length_;  // Source size of the target.
};


// Orders call targets hottest-first: more profiler ticks win; ties are
// broken by smaller AST, then smaller source.
inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) {
  int diff = lhs.ticks() - rhs.ticks();
  if (diff != 0) return diff > 0;
  diff = lhs.ast_length() - rhs.ast_length();
  if (diff != 0) return diff < 0;
  return lhs.src_length() < rhs.src_length();
}


// If every recorded receiver map resolves the property to the same function
// on a shared prototype, compile the "polymorphic" call as a single
// monomorphic call guarded by a map-set check. Returns false when the maps
// do not collapse to one target.
bool HOptimizedGraphBuilder::TryCallPolymorphicAsMonomorphic(
    Call* expr,
    HValue* receiver,
    SmallMapList* types,
    Handle<String> name) {
  if (types->length() > kMaxCallPolymorphism) return false;

  Handle<Map> map(types->at(0));
  LookupResult lookup(isolate());
  if (!CanLoadPropertyFromPrototype(map, name, &lookup)) return false;

  // All maps must load the property from the same prototype.
  Handle<Object> prototype(map->prototype(), isolate());
  for (int count = 1; count < types->length(); ++count) {
    Handle<Map> test_map(types->at(count));
    if (!CanLoadPropertyFromPrototype(test_map, name, &lookup)) return false;
    if (test_map->prototype() != *prototype) return false;
  }

  if (!expr->ComputeTarget(map, name)) return false;

  BuildCheckHeapObject(receiver);
  Add<HCheckMaps>(receiver, types);
  AddCheckPrototypeMaps(expr->holder(), map);
  if (FLAG_trace_inlining) {
    Handle<JSFunction> caller =
        current_info()->closure();
    SmartArrayPointer<char> caller_name =
        caller->shared()->DebugName()->ToCString();
    PrintF("Trying to inline the polymorphic call to %s from %s\n",
           *name->ToCString(), *caller_name);
  }

  // If inlining fails, fall back to a direct call of the constant target.
  if (!TryInlineCall(expr)) {
    int argument_count = expr->arguments()->length() + 1;  // Includes receiver.
    HCallConstantFunction* call =
        new(zone()) HCallConstantFunction(expr->target(), argument_count);
    call->set_position(expr->position());
    PreProcessCall(call);
    AddInstruction(call);
    if (!ast_context()->IsEffect()) Push(call);
    Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
    if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
  }

  return true;
}


// Compiles a named call with multiple recorded receiver maps: builds a
// chain of map (or smi/string) dispatch blocks, one per resolvable target
// (hottest first), inlining or calling each target, and joins the results.
// Falls back to a hard deopt or a generic named call for unhandled maps.
void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(
    Call* expr,
    HValue* receiver,
    SmallMapList* types,
    Handle<String> name) {
  if (TryCallPolymorphicAsMonomorphic(expr, receiver, types, name)) return;

  int argument_count = expr->arguments()->length() + 1;  // Includes receiver.
  HBasicBlock* join = NULL;
  FunctionSorter order[kMaxCallPolymorphism];
  int ordered_functions = 0;

  // Marker maps used to recognize primitive receivers: the maps of the
  // String.prototype / Number.prototype objects stand in for string and
  // number receivers in the recorded type list.
  Handle<Map> initial_string_map(
      isolate()->native_context()->string_function()->initial_map());
  Handle<Map> string_marker_map(
      JSObject::cast(initial_string_map->prototype())->map());
  Handle<Map> initial_number_map(
      isolate()->native_context()->number_function()->initial_map());
  Handle<Map> number_marker_map(
      JSObject::cast(initial_number_map->prototype())->map());
  Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();

  bool handle_smi = false;

  // Collect the maps whose call target can be computed, up to the
  // polymorphism limit.
  for (int i = 0;
       i < types->length() && ordered_functions < kMaxCallPolymorphism;
       ++i) {
    Handle<Map> map = types->at(i);
    if (expr->ComputeTarget(map, name)) {
      if (map.is_identical_to(number_marker_map)) handle_smi = true;
      order[ordered_functions++] =
          FunctionSorter(i,
                         expr->target()->shared()->profiler_ticks(),
                         InliningAstSize(expr->target()),
                         expr->target()->shared()->SourceSize());
    }
  }

  // Hottest targets first (see operator< on FunctionSorter).
  std::sort(order, order + ordered_functions);

  HBasicBlock* number_block = NULL;

  for (int fn = 0; fn < ordered_functions; ++fn) {
    int i = order[fn].index();
    Handle<Map> map = types->at(i);
    if (fn == 0) {
      // Only needed once.
      join = graph()->CreateBasicBlock();
      if (handle_smi) {
        // Smi receivers take the number path; everything else continues to
        // the map-dispatch chain below.
        HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
        HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
        number_block = graph()->CreateBasicBlock();
        HIsSmiAndBranch* smicheck = new(zone()) HIsSmiAndBranch(receiver);
        smicheck->SetSuccessorAt(0, empty_smi_block);
        smicheck->SetSuccessorAt(1, not_smi_block);
        current_block()->Finish(smicheck);
        empty_smi_block->Goto(number_block);
        set_current_block(not_smi_block);
      } else {
        BuildCheckHeapObject(receiver);
      }
    }
    HBasicBlock* if_true = graph()->CreateBasicBlock();
    HBasicBlock* if_false = graph()->CreateBasicBlock();
    HUnaryControlInstruction* compare;

    if (handle_smi && map.is_identical_to(number_marker_map)) {
      // Number receiver: test for heap number; smis joined in above.
      compare = new(zone()) HCompareMap(
          receiver, heap_number_map, if_true, if_false);
      map = initial_number_map;
      expr->set_number_check(
          Handle<JSObject>(JSObject::cast(map->prototype())));
    } else if (map.is_identical_to(string_marker_map)) {
      // String receiver: an instance-type test replaces the map compare.
      compare = new(zone()) HIsStringAndBranch(receiver);
      compare->SetSuccessorAt(0, if_true);
      compare->SetSuccessorAt(1, if_false);
      map = initial_string_map;
      expr->set_string_check(
          Handle<JSObject>(JSObject::cast(map->prototype())));
    } else {
      compare = new(zone()) HCompareMap(receiver, map, if_true, if_false);
      expr->set_map_check();
    }

    current_block()->Finish(compare);

    if (expr->check_type() == NUMBER_CHECK) {
      if_true->Goto(number_block);
      if_true = number_block;
      number_block->SetJoinId(expr->id());
    }
    set_current_block(if_true);

    expr->ComputeTarget(map, name);
    AddCheckPrototypeMaps(expr->holder(), map);
    if (FLAG_trace_inlining && FLAG_polymorphic_inlining) {
      Handle<JSFunction> caller = current_info()->closure();
      SmartArrayPointer<char> caller_name =
          caller->shared()->DebugName()->ToCString();
      PrintF("Trying to inline the polymorphic call to %s from %s\n",
             *name->ToCString(),
             *caller_name);
    }
    if (FLAG_polymorphic_inlining && TryInlineCall(expr)) {
      // Trying to inline will signal that we should bailout from the
      // entire compilation by setting stack overflow on the visitor.
      if (HasStackOverflow()) return;
    } else {
      HCallConstantFunction* call =
          new(zone()) HCallConstantFunction(expr->target(), argument_count);
      call->set_position(expr->position());
      PreProcessCall(call);
      AddInstruction(call);
      if (!ast_context()->IsEffect()) Push(call);
    }

    if (current_block() != NULL) current_block()->Goto(join);
    set_current_block(if_false);
  }

  // Finish up.  Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen.  Otherwise
  // use a generic IC.
  if (ordered_functions == types->length() && FLAG_deoptimize_uncommon_cases) {
    // Because the deopt may be the only path in the polymorphic call, make sure
    // that the environment stack matches the depth on deopt that it otherwise
    // would have had after a successful call.
    Drop(argument_count - (ast_context()->IsEffect() ? 0 : 1));
    FinishExitWithHardDeoptimization("Unknown map in polymorphic call", join);
  } else {
    HValue* context = environment()->context();
    HCallNamed* call = new(zone()) HCallNamed(context, name, argument_count);
    call->set_position(expr->position());
    PreProcessCall(call);

    if (join != NULL) {
      AddInstruction(call);
      if (!ast_context()->IsEffect()) Push(call);
      current_block()->Goto(join);
    } else {
      // No dispatch blocks were built; the generic call is the whole
      // expansion.
      return ast_context()->ReturnInstruction(call, expr->id());
    }
  }

  // We assume that control flow is always live after an expression.  So
  // even without predecessors to the join block, we set it as the exit
  // block and continue by adding instructions there.
  ASSERT(join != NULL);
  if (join->HasPredecessor()) {
    set_current_block(join);
    join->SetJoinId(expr->id());
    if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
  } else {
    set_current_block(NULL);
  }
}


// Logs an inlining decision (success when reason == NULL, otherwise the
// reason for not inlining) when --trace-inlining is enabled.
void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
                                         Handle<JSFunction> caller,
                                         const char* reason) {
  if (FLAG_trace_inlining) {
    SmartArrayPointer<char> target_name =
        target->shared()->DebugName()->ToCString();
    SmartArrayPointer<char> caller_name =
        caller->shared()->DebugName()->ToCString();
    if (reason == NULL) {
      PrintF("Inlined %s called from %s.\n", *target_name, *caller_name);
    } else {
      PrintF("Did not inline %s called from %s (%s).\n",
             *target_name, *caller_name, reason);
    }
  }
}


// Sentinel AST size returned by InliningAstSize for uninlinable targets;
// larger than any real node count.
static const int kNotInlinable = 1000000000;


// Returns the AST node count of target as an inlining cost estimate, or
// kNotInlinable if cheap early checks already rule out inlining.
int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
  if (!FLAG_use_inlining) return kNotInlinable;

  // Precondition: call is monomorphic and we have found a target with the
  // appropriate arity.
  Handle<JSFunction> caller = current_info()->closure();
  Handle<SharedFunctionInfo> target_shared(target->shared());

  // Do a quick check on source code length to avoid parsing large
  // inlining candidates.
  if (target_shared->SourceSize() >
      Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
    TraceInline(target, caller, "target text too big");
    return kNotInlinable;
  }

  // Target must be inlineable.
  if (!target->IsInlineable()) {
    TraceInline(target, caller, "target not inlineable");
    return kNotInlinable;
  }
  if (target_shared->dont_inline() || target_shared->dont_optimize()) {
    TraceInline(target, caller, "target contains unsupported syntax [early]");
    return kNotInlinable;
  }

  int nodes_added = target_shared->ast_node_count();
  return nodes_added;
}


// Attempts to inline target at the current call site. Returns false when
// the call should instead be emitted out-of-line (limits exceeded, parse
// failure, unsupported features). Returns true once the decision to inline
// is made — including the case where graph construction for the inlinee
// bails out via stack overflow (inline_bailout_ is set in that case).
bool HOptimizedGraphBuilder::TryInline(CallKind call_kind,
                                       Handle<JSFunction> target,
                                       int arguments_count,
                                       HValue* implicit_return_value,
                                       BailoutId ast_id,
                                       BailoutId return_id,
                                       InliningKind inlining_kind) {
  int nodes_added = InliningAstSize(target);
  if (nodes_added == kNotInlinable) return false;

  Handle<JSFunction> caller = current_info()->closure();

  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
    TraceInline(target, caller, "target AST is too large [early]");
    return false;
  }

#if !V8_TARGET_ARCH_IA32
  // Target must be able to use caller's context.
  CompilationInfo* outer_info = current_info();
  if (target->context() != outer_info->closure()->context() ||
      outer_info->scope()->contains_with() ||
      outer_info->scope()->num_heap_slots() > 0) {
    TraceInline(target, caller, "target requires context change");
    return false;
  }
#endif


  // Don't inline deeper than the maximum number of inlining levels.
  HEnvironment* env = environment();
  int current_level = 1;
  while (env->outer() != NULL) {
    if (current_level == FLAG_max_inlining_levels) {
      TraceInline(target, caller, "inline depth limit reached");
      return false;
    }
    // Only JS function frames count toward the depth; other frame types
    // (e.g. arguments adaptors) are skipped.
    if (env->outer()->frame_type() == JS_FUNCTION) {
      current_level++;
    }
    env = env->outer();
  }

  // Don't inline recursive functions.
  for (FunctionState* state = function_state();
       state != NULL;
       state = state->outer()) {
    if (*state->compilation_info()->closure() == *target) {
      TraceInline(target, caller, "target is recursive");
      return false;
    }
  }

  // We don't want to add more than a certain number of nodes from inlining.
  if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
                           kUnlimitedMaxInlinedNodesCumulative)) {
    TraceInline(target, caller, "cumulative AST node limit reached");
    return false;
  }

  // Parse and allocate variables.
  CompilationInfo target_info(target, zone());
  Handle<SharedFunctionInfo> target_shared(target->shared());
  if (!Parser::Parse(&target_info) || !Scope::Analyze(&target_info)) {
    if (target_info.isolate()->has_pending_exception()) {
      // Parse or scope error, never optimize this function.
      SetStackOverflow();
      target_shared->DisableOptimization(kParseScopeError);
    }
    TraceInline(target, caller, "parse failure");
    return false;
  }

  if (target_info.scope()->num_heap_slots() > 0) {
    TraceInline(target, caller, "target has context-allocated variables");
    return false;
  }
  FunctionLiteral* function = target_info.function();

  // The following conditions must be checked again after re-parsing, because
  // earlier the information might not have been complete due to lazy parsing.
  nodes_added = function->ast_node_count();
  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
    TraceInline(target, caller, "target AST is too large [late]");
    return false;
  }
  AstProperties::Flags* flags(function->flags());
  if (flags->Contains(kDontInline) || flags->Contains(kDontOptimize)) {
    TraceInline(target, caller, "target contains unsupported syntax [late]");
    return false;
  }

  // If the function uses the arguments object check that inlining of functions
  // with arguments object is enabled and the arguments-variable is
  // stack allocated.
  if (function->scope()->arguments() != NULL) {
    if (!FLAG_inline_arguments) {
      TraceInline(target, caller, "target uses arguments object");
      return false;
    }

    if (!function->scope()->arguments()->IsStackAllocated()) {
      TraceInline(target,
                  caller,
                  "target uses non-stackallocated arguments object");
      return false;
    }
  }

  // All declarations must be inlineable.
  ZoneList<Declaration*>* decls = target_info.scope()->declarations();
  int decl_count = decls->length();
  for (int i = 0; i < decl_count; ++i) {
    if (!decls->at(i)->IsInlineable()) {
      TraceInline(target, caller, "target has non-trivial declaration");
      return false;
    }
  }

  // Generate the deoptimization data for the unoptimized version of
  // the target function if we don't already have it.
  if (!target_shared->has_deoptimization_support()) {
    // Note that we compile here using the same AST that we will use for
    // generating the optimized inline code.
    target_info.EnableDeoptimizationSupport();
    if (!FullCodeGenerator::MakeCode(&target_info)) {
      TraceInline(target, caller, "could not generate deoptimization info");
      return false;
    }
    if (target_shared->scope_info() == ScopeInfo::Empty(isolate())) {
      // The scope info might not have been set if a lazily compiled
      // function is inlined before being called for the first time.
      Handle<ScopeInfo> target_scope_info =
          ScopeInfo::Create(target_info.scope(), zone());
      target_shared->set_scope_info(*target_scope_info);
    }
    target_shared->EnableDeoptimizationSupport(*target_info.code());
    Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG,
                                        &target_info,
                                        target_shared);
  }

  // ----------------------------------------------------------------
  // After this point, we've made a decision to inline this function (so
  // TryInline should always return true).

  // Type-check the inlined function.
  ASSERT(target_shared->has_deoptimization_support());
  AstTyper::Run(&target_info);

  // Save the pending call context. Set up new one for the inlined function.
  // The function state is new-allocated because we need to delete it
  // in two different places.
  FunctionState* target_state = new FunctionState(
      this, &target_info, inlining_kind);

  HConstant* undefined = graph()->GetConstantUndefined();
  bool undefined_receiver = HEnvironment::UseUndefinedReceiver(
      target, function, call_kind, inlining_kind);
  HEnvironment* inner_env =
      environment()->CopyForInlining(target,
                                     arguments_count,
                                     function,
                                     undefined,
                                     function_state()->inlining_kind(),
                                     undefined_receiver);
#if V8_TARGET_ARCH_IA32
  // IA32 only, overwrite the caller's context in the deoptimization
  // environment with the correct one.
  //
  // TODO(kmillikin): implement the same inlining on other platforms so we
  // can remove the unsightly ifdefs in this function.
  HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
  inner_env->BindContext(context);
#endif

  Add<HSimulate>(return_id);
  current_block()->UpdateEnvironment(inner_env);
  HArgumentsObject* arguments_object = NULL;

  // If the function uses arguments object create and bind one, also copy
  // current arguments values to use them for materialization.
  if (function->scope()->arguments() != NULL) {
    ASSERT(function->scope()->arguments()->IsStackAllocated());
    HEnvironment* arguments_env = inner_env->arguments_environment();
    int arguments_count = arguments_env->parameter_count();
    arguments_object = Add<HArgumentsObject>(arguments_count);
    inner_env->Bind(function->scope()->arguments(), arguments_object);
    for (int i = 0; i < arguments_count; i++) {
      arguments_object->AddArgument(arguments_env->Lookup(i), zone());
    }
  }

  HEnterInlined* enter_inlined =
      Add<HEnterInlined>(target, arguments_count, function,
                         function_state()->inlining_kind(),
                         function->scope()->arguments(),
                         arguments_object, undefined_receiver);
  function_state()->set_entry(enter_inlined);

  VisitDeclarations(target_info.scope()->declarations());
  VisitStatements(function->body());
  if (HasStackOverflow()) {
    // Bail out if the inline function did, as we cannot residualize a call
    // instead.
    TraceInline(target, caller, "inline graph construction failed");
    target_shared->DisableOptimization(kInliningBailedOut);
    inline_bailout_ = true;
    delete target_state;
    return true;
  }

  // Update inlined nodes count.
  inlined_count_ += nodes_added;

  Handle<Code> unoptimized_code(target_shared->code());
  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
  Handle<TypeFeedbackInfo> type_info(
      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
  graph()->update_type_change_checksum(type_info->own_type_change_checksum());

  TraceInline(target, caller, NULL);

  if (current_block() != NULL) {
    FunctionState* state = function_state();
    if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
      // Falling off the end of an inlined construct call. In a test context the
      // return value will always evaluate to true, in a value context the
      // return value is the newly allocated receiver.
      if (call_context()->IsTest()) {
        current_block()->Goto(inlined_test_context()->if_true(), state);
      } else if (call_context()->IsEffect()) {
        current_block()->Goto(function_return(), state);
      } else {
        ASSERT(call_context()->IsValue());
        current_block()->AddLeaveInlined(implicit_return_value, state);
      }
    } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
      // Falling off the end of an inlined setter call. The returned value is
      // never used, the value of an assignment is always the value of the RHS
      // of the assignment.
      if (call_context()->IsTest()) {
        inlined_test_context()->ReturnValue(implicit_return_value);
      } else if (call_context()->IsEffect()) {
        current_block()->Goto(function_return(), state);
      } else {
        ASSERT(call_context()->IsValue());
        current_block()->AddLeaveInlined(implicit_return_value, state);
      }
    } else {
      // Falling off the end of a normal inlined function. This basically means
      // returning undefined.
      if (call_context()->IsTest()) {
        current_block()->Goto(inlined_test_context()->if_false(), state);
      } else if (call_context()->IsEffect()) {
        current_block()->Goto(function_return(), state);
      } else {
        ASSERT(call_context()->IsValue());
        current_block()->AddLeaveInlined(undefined, state);
      }
    }
  }

  // Fix up the function exits.
  if (inlined_test_context() != NULL) {
    HBasicBlock* if_true = inlined_test_context()->if_true();
    HBasicBlock* if_false = inlined_test_context()->if_false();

    HEnterInlined* entry = function_state()->entry();

    // Pop the return test context from the expression context stack.
    ASSERT(ast_context() == inlined_test_context());
    ClearInlinedTestContext();
    delete target_state;

    // Forward to the real test context.
    if (if_true->HasPredecessor()) {
      entry->RegisterReturnTarget(if_true, zone());
      if_true->SetJoinId(ast_id);
      HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
      if_true->Goto(true_target, function_state());
    }
    if (if_false->HasPredecessor()) {
      entry->RegisterReturnTarget(if_false, zone());
      if_false->SetJoinId(ast_id);
      HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
      if_false->Goto(false_target, function_state());
    }
    set_current_block(NULL);
    return true;

  } else if (function_return()->HasPredecessor()) {
    function_state()->entry()->RegisterReturnTarget(function_return(), zone());
    function_return()->SetJoinId(ast_id);
    set_current_block(function_return());
  } else {
    set_current_block(NULL);
  }
  delete target_state;
  return true;
}


// Inlines a regular call expression, classifying it as a method call when
// the callee is a property access.
bool HOptimizedGraphBuilder::TryInlineCall(Call* expr, bool drop_extra) {
  // The function call we are inlining is a method call if the call
  // is a property call.
  CallKind call_kind = (expr->expression()->AsProperty() == NULL)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;

  return TryInline(call_kind,
                   expr->target(),
                   expr->arguments()->length(),
                   NULL,
                   expr->id(),
                   expr->ReturnId(),
                   drop_extra ? DROP_EXTRA_ON_RETURN : NORMAL_RETURN);
}


// Inlines a 'new' expression; implicit_return_value is the freshly
// allocated receiver, used when the constructor body returns nothing.
bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
                                                HValue* implicit_return_value) {
  return TryInline(CALL_AS_FUNCTION,
                   expr->target(),
                   expr->arguments()->length(),
                   implicit_return_value,
                   expr->id(),
                   expr->ReturnId(),
                   CONSTRUCT_CALL_RETURN);
}


// Inlines an accessor getter call (zero explicit arguments).
bool HOptimizedGraphBuilder::TryInlineGetter(Handle<JSFunction> getter,
                                             Property* prop) {
  return TryInline(CALL_AS_METHOD,
                   getter,
                   0,
                   NULL,
                   prop->id(),
                   prop->LoadId(),
                   GETTER_CALL_RETURN);
}


// Inlines an accessor setter call; implicit_return_value is the RHS of the
// assignment, which is the value of the whole assignment expression.
bool HOptimizedGraphBuilder::TryInlineSetter(Handle<JSFunction> setter,
                                             BailoutId id,
                                             BailoutId assignment_id,
                                             HValue* implicit_return_value) {
  return TryInline(CALL_AS_METHOD,
                   setter,
                   1,
                   implicit_return_value,
                   id, assignment_id,
                   SETTER_CALL_RETURN);
}


// Inlines the target of a Function.prototype.apply-style call with an
// explicitly supplied argument count.
bool HOptimizedGraphBuilder::TryInlineApply(Handle<JSFunction> function,
                                            Call* expr,
                                            int arguments_count) {
  return TryInline(CALL_AS_METHOD,
                   function,
                   arguments_count,
                   NULL,
                   expr->id(),
                   expr->ReturnId(),
                   NORMAL_RETURN);
}


// Replaces a direct call to a recognized builtin (Math.floor, Math.imul,
// etc.) with the corresponding Hydrogen instruction. Arguments are expected
// on the environment stack; returns false if the builtin is unsupported or
// called with the wrong arity.
bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr,
                                                          bool drop_extra) {
  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
  switch (id) {
    case kMathExp:
      if (!FLAG_fast_math) break;
      // Fall through if FLAG_fast_math.
    case kMathRound:
    case kMathFloor:
    case kMathAbs:
    case kMathSqrt:
    case kMathLog:
    case kMathSin:
    case kMathCos:
    case kMathTan:
      if (expr->arguments()->length() == 1) {
        HValue* argument = Pop();
        HValue* context = environment()->context();
        Drop(1);  // Receiver.
        HInstruction* op =
            HUnaryMathOperation::New(zone(), context, argument, id);
        op->set_position(expr->position());
        if (drop_extra) Drop(1);  // Optionally drop the function.
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    case kMathImul:
      if (expr->arguments()->length() == 2) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(1);  // Receiver.
        HValue* context = environment()->context();
        HInstruction* op = HMul::NewImul(zone(), context, left, right);
        if (drop_extra) Drop(1);  // Optionally drop the function.
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    default:
      // Not supported for inlining yet.
      break;
  }
  return false;
}


// Like TryInlineBuiltinFunctionCall, but for builtins invoked as methods on
// a receiver (e.g. str.charCodeAt, Math.* with a receiver map check).
bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
    Call* expr,
    HValue* receiver,
    Handle<Map> receiver_map,
    CheckType check_type) {
  ASSERT(check_type != RECEIVER_MAP_CHECK || !receiver_map.is_null());
  // Try to inline calls like Math.* as operations in the calling function.
  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
  int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
  // Dispatch on the builtin id.  Each arm first validates argument count and
  // check type, then pops its operands off the expression stack, emits the
  // replacement instruction, and returns it through the AST context.
  switch (id) {
    case kStringCharCodeAt:
    case kStringCharAt:
      if (argument_count == 2 && check_type == STRING_CHECK) {
        HValue* index = Pop();
        HValue* string = Pop();
        HValue* context = environment()->context();
        ASSERT(!expr->holder().is_null());
        // Guard the String.prototype chain up to the holder so the builtin
        // cannot have been overwritten.
        BuildCheckPrototypeMaps(Call::GetPrototypeForPrimitiveCheck(
                STRING_CHECK, expr->holder()->GetIsolate()),
            expr->holder());
        HInstruction* char_code =
            BuildStringCharCodeAt(string, index);
        if (id == kStringCharCodeAt) {
          ast_context()->ReturnInstruction(char_code, expr->id());
          return true;
        }
        // charAt: additionally convert the char code to a one-char string.
        AddInstruction(char_code);
        HInstruction* result =
            HStringCharFromCode::New(zone(), context, char_code);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kStringFromCharCode:
      if (argument_count == 2 && check_type == RECEIVER_MAP_CHECK) {
        AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
        HValue* argument = Pop();
        HValue* context = environment()->context();
        Drop(1);  // Receiver.
        HInstruction* result =
            HStringCharFromCode::New(zone(), context, argument);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kMathExp:
      if (!FLAG_fast_math) break;
      // Fall through if FLAG_fast_math.
    case kMathRound:
    case kMathFloor:
    case kMathAbs:
    case kMathSqrt:
    case kMathLog:
    case kMathSin:
    case kMathCos:
    case kMathTan:
      if (argument_count == 2 && check_type == RECEIVER_MAP_CHECK) {
        AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
        HValue* argument = Pop();
        HValue* context = environment()->context();
        Drop(1);  // Receiver.
        HInstruction* op =
            HUnaryMathOperation::New(zone(), context, argument, id);
        op->set_position(expr->position());
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    case kMathPow:
      if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
        AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
        HValue* right = Pop();
        HValue* left = Pop();
        Pop();  // Pop receiver.
        HValue* context = environment()->context();
        HInstruction* result = NULL;
        // Use sqrt() if exponent is 0.5 or -0.5.
        if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
          double exponent = HConstant::cast(right)->DoubleValue();
          if (exponent == 0.5) {
            result =
                HUnaryMathOperation::New(zone(), context, left, kMathPowHalf);
          } else if (exponent == -0.5) {
            HValue* one = graph()->GetConstant1();
            HInstruction* sqrt =
                HUnaryMathOperation::New(zone(), context, left, kMathPowHalf);
            AddInstruction(sqrt);
            // MathPowHalf doesn't have side effects so there's no need for
            // an environment simulation here.
            ASSERT(!sqrt->HasObservableSideEffects());
            result = HDiv::New(zone(), context, one, sqrt);
          } else if (exponent == 2.0) {
            // x**2 is strength-reduced to x*x.
            result = HMul::New(zone(), context, left, left);
          }
        } else if (right->EqualsInteger32Constant(2)) {
          result = HMul::New(zone(), context, left, left);
        }

        if (result == NULL) {
          // General case: emit a full power instruction.
          result = HPower::New(zone(), context, left, right);
        }
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kMathRandom:
      if (argument_count == 1 && check_type == RECEIVER_MAP_CHECK) {
        AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
        Drop(1);  // Receiver.
        HGlobalObject* global_object = Add<HGlobalObject>();
        HRandom* result = new(zone()) HRandom(global_object);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kMathMax:
    case kMathMin:
      if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
        AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(1);  // Receiver.
        HValue* context = environment()->context();
        HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
                                                     : HMathMinMax::kMathMax;
        HInstruction* result =
            HMathMinMax::New(zone(), context, left, right, op);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kMathImul:
      if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
        AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(1);  // Receiver.
        HValue* context = environment()->context();
        HInstruction* result = HMul::NewImul(zone(), context, left, right);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    default:
      // Not yet supported for inlining.
      break;
  }
  return false;
}


// Recognizes the pattern f.apply(receiver, arguments) where |arguments| is
// the caller's arguments object, and compiles it either as an
// HApplyArguments instruction (top-level) or by forwarding the known
// argument values (when already inlined).  Returns false if the pattern
// does not match; returns true once graph building for the call is done
// (including the stack-overflow bailout paths).
bool HOptimizedGraphBuilder::TryCallApply(Call* expr) {
  Expression* callee = expr->expression();
  Property* prop = callee->AsProperty();
  ASSERT(prop != NULL);

  if (!expr->IsMonomorphic() || expr->check_type() != RECEIVER_MAP_CHECK) {
    return false;
  }
  Handle<Map> function_map = expr->GetReceiverTypes()->first();
  if (function_map->instance_type() != JS_FUNCTION_TYPE ||
      !expr->target()->shared()->HasBuiltinFunctionId() ||
      expr->target()->shared()->builtin_function_id() != kFunctionApply) {
    return false;
  }

  // Only applies when the calling function itself has an arguments object.
  if (current_info()->scope()->arguments() == NULL) return false;

  ZoneList<Expression*>* args = expr->arguments();
  if (args->length() != 2) return false;

  // The second argument must be the (stack-allocated) arguments object.
  VariableProxy* arg_two = args->at(1)->AsVariableProxy();
  if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
  HValue* arg_two_value = LookupAndMakeLive(arg_two->var());
  if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;

  // Found pattern f.apply(receiver, arguments).
  // Evaluate the function being applied.  From here on we always return
  // true: the pattern matched and graph building proceeds (or bails out on
  // stack overflow) along this path.
  VisitForValue(prop->obj());
  if (HasStackOverflow() || current_block() == NULL) return true;
  HValue* function = Top();
  AddCheckConstantFunction(expr->holder(), function, function_map);
  Drop(1);

  // Evaluate the explicit receiver argument.
  VisitForValue(args->at(0));
  if (HasStackOverflow() || current_block() == NULL) return true;
  HValue* receiver = Pop();

  if (function_state()->outer() == NULL) {
    // Not inlined: forward the caller's actual arguments at runtime.
    HInstruction* elements = Add<HArgumentsElements>(false);
    HInstruction* length = Add<HArgumentsLength>(elements);
    HValue* wrapped_receiver = BuildWrapReceiver(receiver, function);
    HInstruction* result =
        new(zone()) HApplyArguments(function,
                                    wrapped_receiver,
                                    length,
                                    elements);
    result->set_position(expr->position());
    ast_context()->ReturnInstruction(result, expr->id());
    return true;
  } else {
    // We are inside inlined function and we know exactly what is inside
    // arguments object. But we need to be able to materialize at deopt.
    ASSERT_EQ(environment()->arguments_environment()->parameter_count(),
              function_state()->entry()->arguments_object()->arguments_count());
    HArgumentsObject* args = function_state()->entry()->arguments_object();
    const ZoneList<HValue*>* arguments_values = args->arguments_values();
    int arguments_count = arguments_values->length();
    // Push receiver and all argument values (index 0 is the original
    // receiver, replaced by the wrapped one) so inlining sees them as the
    // callee's parameters.
    Push(BuildWrapReceiver(receiver, function));
    for (int i = 1; i < arguments_count; i++) {
      Push(arguments_values->at(i));
    }

    Handle<JSFunction> known_function;
    if (function->IsConstant()) {
      HConstant* constant_function = HConstant::cast(function);
      known_function = Handle<JSFunction>::cast(constant_function->handle());
      int args_count = arguments_count - 1;  // Excluding receiver.
      if (TryInlineApply(known_function, expr, args_count)) return true;
    }

    // Inlining failed: fall back to pushing the arguments explicitly and
    // emitting an invoke.
    Drop(arguments_count - 1);
    PushAndAdd(New<HPushArgument>(Pop()));
    for (int i = 1; i < arguments_count; i++) {
      PushAndAdd(New<HPushArgument>(arguments_values->at(i)));
    }

    HValue* context = environment()->context();
    HInvokeFunction* call = new(zone()) HInvokeFunction(
        context,
        function,
        known_function,
        arguments_count);
    Drop(arguments_count);
    call->set_position(expr->position());
    ast_context()->ReturnInstruction(call, expr->id());
    return true;
  }
}


// Builds the graph for a call expression, choosing among keyed calls, named
// (property) calls, global calls and generic calls, and attempting builtin
// and user-function inlining along the way.
void HOptimizedGraphBuilder::VisitCall(Call* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Expression* callee = expr->expression();
  int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
  HInstruction* call = NULL;

  Property* prop = callee->AsProperty();
  if (prop != NULL) {
    if (!prop->key()->IsPropertyName()) {
      // Keyed function call.
      CHECK_ALIVE(VisitArgument(prop->obj()));

      CHECK_ALIVE(VisitForValue(prop->key()));
      // Push receiver and key like the non-optimized code generator expects it.
      HValue* key = Pop();
      HValue* receiver = Pop();
      Push(key);
      Push(receiver);

      CHECK_ALIVE(VisitArgumentList(expr->arguments()));

      HValue* context = environment()->context();
      call = new(zone()) HCallKeyed(context, key, argument_count);
      call->set_position(expr->position());
      Drop(argument_count + 1);  // 1 is the key.
      return ast_context()->ReturnInstruction(call, expr->id());
    }

    // Named function call.
    // First try the special-cased f.apply(receiver, arguments) pattern.
    if (TryCallApply(expr)) return;

    CHECK_ALIVE(VisitForValue(prop->obj()));
    CHECK_ALIVE(VisitExpressions(expr->arguments()));

    Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
    SmallMapList* types = expr->GetReceiverTypes();

    bool monomorphic = expr->IsMonomorphic();
    Handle<Map> receiver_map;
    if (monomorphic) {
      receiver_map = (types == NULL || types->is_empty())
          ? Handle<Map>::null()
          : types->first();
    }

    // The receiver sits below the arguments on the expression stack.
    HValue* receiver =
        environment()->ExpressionStackAt(expr->arguments()->length());
    if (monomorphic) {
      if (TryInlineBuiltinMethodCall(expr,
                                     receiver,
                                     receiver_map,
                                     expr->check_type())) {
        if (FLAG_trace_inlining) {
          PrintF("Inlining builtin ");
          expr->target()->ShortPrint();
          PrintF("\n");
        }
        return;
      }

      if (CallStubCompiler::HasCustomCallGenerator(expr->target()) ||
          expr->check_type() != RECEIVER_MAP_CHECK) {
        // When the target has a custom call IC generator, use the IC,
        // because it is likely to generate better code.  Also use the IC
        // when a primitive receiver check is required.
        HValue* context = environment()->context();
        call = PreProcessCall(
            new(zone()) HCallNamed(context, name, argument_count));
      } else {
        AddCheckConstantFunction(expr->holder(), receiver, receiver_map);

        if (TryInlineCall(expr)) return;
        call = PreProcessCall(
            new(zone()) HCallConstantFunction(expr->target(),
                                              argument_count));
      }
    } else if (types != NULL && types->length() > 1) {
      // Polymorphic call site: handled separately (emits its own return).
      ASSERT(expr->check_type() == RECEIVER_MAP_CHECK);
      HandlePolymorphicCallNamed(expr, receiver, types, name);
      return;

    } else {
      // No useful type feedback: generic named call through the IC.
      HValue* context = environment()->context();
      call = PreProcessCall(
          new(zone()) HCallNamed(context, name, argument_count));
    }

  } else {
    VariableProxy* proxy = expr->expression()->AsVariableProxy();
    if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
      return Bailout(kPossibleDirectCallToEval);
    }

    bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
    if (global_call) {
      Variable* var = proxy->var();
      bool known_global_function = false;
      // If there is a global property cell for the name at compile time and
      // access check is not enabled we assume that the function will not change
      // and generate optimized code for calling the function.
      LookupResult lookup(isolate());
      GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, false);
      if (type == kUseCell &&
          !current_info()->global_object()->IsAccessCheckNeeded()) {
        Handle<GlobalObject> global(current_info()->global_object());
        known_global_function = expr->ComputeGlobalTarget(global, &lookup);
      }
      if (known_global_function) {
        // Push the global object instead of the global receiver because
        // code generated by the full code generator expects it.
        HValue* context = environment()->context();
        HGlobalObject* global_object = new(zone()) HGlobalObject(context);
        PushAndAdd(global_object);
        CHECK_ALIVE(VisitExpressions(expr->arguments()));

        CHECK_ALIVE(VisitForValue(expr->expression()));
        HValue* function = Pop();
        // Guard against the global function having been reassigned.
        Add<HCheckFunction>(function, expr->target());

        // Replace the global object with the global receiver.
        HGlobalReceiver* global_receiver = Add<HGlobalReceiver>(global_object);
        // Index of the receiver from the top of the expression stack.
        const int receiver_index = argument_count - 1;
        ASSERT(environment()->ExpressionStackAt(receiver_index)->
               IsGlobalObject());
        environment()->SetExpressionStackAt(receiver_index, global_receiver);

        if (TryInlineBuiltinFunctionCall(expr, false)) {  // Nothing to drop.
          if (FLAG_trace_inlining) {
            PrintF("Inlining builtin ");
            expr->target()->ShortPrint();
            PrintF("\n");
          }
          return;
        }
        if (TryInlineCall(expr)) return;

        // Self-recursive calls are flagged on the graph (used elsewhere,
        // presumably to throttle optimizations -- not visible here).
        if (expr->target().is_identical_to(current_info()->closure())) {
          graph()->MarkRecursive();
        }

        if (CallStubCompiler::HasCustomCallGenerator(expr->target())) {
          // When the target has a custom call IC generator, use the IC,
          // because it is likely to generate better code.
          HValue* context = environment()->context();
          call = PreProcessCall(
              new(zone()) HCallNamed(context, var->name(), argument_count));
        } else {
          call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(),
                                                             argument_count));
        }
      } else {
        // Unknown global target: call through the generic global-call path.
        HGlobalObject* receiver = Add<HGlobalObject>();
        PushAndAdd(New<HPushArgument>(receiver));
        CHECK_ALIVE(VisitArgumentList(expr->arguments()));

        call = New<HCallGlobal>(var->name(), argument_count);
        Drop(argument_count);
      }

    } else if (expr->IsMonomorphic()) {
      // The function is on the stack in the unoptimized code during
      // evaluation of the arguments.
      CHECK_ALIVE(VisitForValue(expr->expression()));
      HValue* function = Top();
      HGlobalObject* global = Add<HGlobalObject>();
      HGlobalReceiver* receiver = New<HGlobalReceiver>(global);
      PushAndAdd(receiver);
      CHECK_ALIVE(VisitExpressions(expr->arguments()));
      Add<HCheckFunction>(function, expr->target());

      if (TryInlineBuiltinFunctionCall(expr, true)) {  // Drop the function.
        if (FLAG_trace_inlining) {
          PrintF("Inlining builtin ");
          expr->target()->ShortPrint();
          PrintF("\n");
        }
        return;
      }

      if (TryInlineCall(expr, true)) {  // Drop function from environment.
        return;
      } else {
        call = PreProcessCall(New<HInvokeFunction>(function, expr->target(),
                                                   argument_count));
        Drop(1);  // The function.
      }

    } else {
      // Generic call of an arbitrary expression.
      CHECK_ALIVE(VisitForValue(expr->expression()));
      HValue* function = Top();
      HGlobalObject* global_object = Add<HGlobalObject>();
      HGlobalReceiver* receiver = Add<HGlobalReceiver>(global_object);
      PushAndAdd(New<HPushArgument>(receiver));
      CHECK_ALIVE(VisitArgumentList(expr->arguments()));

      call = New<HCallFunction>(function, argument_count);
      Drop(argument_count + 1);
    }
  }

  call->set_position(expr->position());
  return ast_context()->ReturnInstruction(call, expr->id());
}


// Checks whether allocation using the given constructor can be inlined.
static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
  return constructor->has_initial_map() &&
      constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
      constructor->initial_map()->instance_size() < HAllocate::kMaxInlineSize &&
      constructor->initial_map()->InitialPropertiesLength() == 0;
}


// Builds the graph for a 'new' expression.  When the call site is
// monomorphic and the allocation is inlineable, the receiver allocation and
// initialization are emitted inline and constructor inlining is attempted;
// otherwise a generic HCallNew / HCallNewArray is emitted.
void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
  HValue* context = environment()->context();
  Factory* factory = isolate()->factory();

  if (FLAG_inline_construct &&
      expr->IsMonomorphic() &&
      IsAllocationInlineable(expr->target())) {
    // The constructor function is on the stack in the unoptimized code
    // during evaluation of the arguments.
    CHECK_ALIVE(VisitForValue(expr->expression()));
    HValue* function = Top();
    CHECK_ALIVE(VisitExpressions(expr->arguments()));
    Handle<JSFunction> constructor = expr->target();
    HValue* check = Add<HCheckFunction>(function, constructor);

    // Force completion of inobject slack tracking before generating
    // allocation code to finalize instance size.
    if (constructor->shared()->IsInobjectSlackTrackingInProgress()) {
      constructor->shared()->CompleteInobjectSlackTracking();
    }

    // Calculate instance size from initial map of constructor.
    ASSERT(constructor->has_initial_map());
    Handle<Map> initial_map(constructor->initial_map());
    int instance_size = initial_map->instance_size();
    ASSERT(initial_map->InitialPropertiesLength() == 0);

    // Allocate an instance of the implicit receiver object.
    HValue* size_in_bytes = Add<HConstant>(instance_size);
    PretenureFlag pretenure_flag =
        (FLAG_pretenuring_call_new &&
         isolate()->heap()->GetPretenureMode() == TENURED)
        ? TENURED : NOT_TENURED;
    HAllocate* receiver =
        Add<HAllocate>(size_in_bytes, HType::JSObject(), pretenure_flag,
                       JS_OBJECT_TYPE);
    receiver->set_known_initial_map(initial_map);

    // Load the initial map from the constructor.
    HValue* constructor_value = Add<HConstant>(constructor);
    HValue* initial_map_value =
        Add<HLoadNamedField>(constructor_value, HObjectAccess::ForJSObjectOffset(
            JSFunction::kPrototypeOrInitialMapOffset));

    // Initialize map and fields of the newly allocated object.
    { NoObservableSideEffectsScope no_effects(this);
      ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForJSObjectOffset(JSObject::kMapOffset),
          initial_map_value);
      HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForJSObjectOffset(JSObject::kPropertiesOffset),
          empty_fixed_array);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForJSObjectOffset(JSObject::kElementsOffset),
          empty_fixed_array);
      // Pre-fill every in-object property slot with undefined so the object
      // is fully initialized before the constructor body runs.
      if (initial_map->inobject_properties() != 0) {
        HConstant* undefined = graph()->GetConstantUndefined();
        for (int i = 0; i < initial_map->inobject_properties(); i++) {
          int property_offset = JSObject::kHeaderSize + i * kPointerSize;
          Add<HStoreNamedField>(receiver,
              HObjectAccess::ForJSObjectOffset(property_offset),
              undefined);
        }
      }
    }

    // Replace the constructor function with a newly allocated receiver using
    // the index of the receiver from the top of the expression stack.
    const int receiver_index = argument_count - 1;
    ASSERT(environment()->ExpressionStackAt(receiver_index) == function);
    environment()->SetExpressionStackAt(receiver_index, receiver);

    if (TryInlineConstruct(expr, receiver)) return;

    // TODO(mstarzinger): For now we remove the previous HAllocate and all
    // corresponding instructions and instead add HPushArgument for the
    // arguments in case inlining failed. What we actually should do is for
    // inlining to try to build a subgraph without mutating the parent graph.
    // Walk backwards from the end of the block, deleting everything emitted
    // after (and including) the inline-allocation sequence.
    HInstruction* instr = current_block()->last();
    while (instr != initial_map_value) {
      HInstruction* prev_instr = instr->previous();
      instr->DeleteAndReplaceWith(NULL);
      instr = prev_instr;
    }
    initial_map_value->DeleteAndReplaceWith(NULL);
    receiver->DeleteAndReplaceWith(NULL);
    check->DeleteAndReplaceWith(NULL);
    // Restore the constructor function on the stack and emit a normal call.
    environment()->SetExpressionStackAt(receiver_index, function);
    HInstruction* call = PreProcessCall(
        new(zone()) HCallNew(context, function, argument_count));
    call->set_position(expr->position());
    return ast_context()->ReturnInstruction(call, expr->id());
  } else {
    // The constructor function is both an operand to the instruction and an
    // argument to the construct call.
    Handle<JSFunction> array_function(
        isolate()->global_context()->array_function(), isolate());
    CHECK_ALIVE(VisitArgument(expr->expression()));
    HValue* constructor = HPushArgument::cast(Top())->argument();
    CHECK_ALIVE(VisitArgumentList(expr->arguments()));
    HCallNew* call;
    if (expr->target().is_identical_to(array_function)) {
      // 'new Array(...)': use the specialized instruction carrying the
      // allocation-site feedback cell and elements kind.
      Handle<Cell> cell = expr->allocation_info_cell();
      Add<HCheckFunction>(constructor, array_function);
      call = new(zone()) HCallNewArray(context, constructor, argument_count,
                                       cell, expr->elements_kind());
    } else {
      call = new(zone()) HCallNew(context, constructor, argument_count);
    }
    Drop(argument_count);
    call->set_position(expr->position());
    return ast_context()->ReturnInstruction(call, expr->id());
  }
}


// Support for generating inlined runtime functions.

// Lookup table for generators for runtime calls that are generated inline.
// Elements of the table are member pointers to functions of
// HOptimizedGraphBuilder.
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)        \
    &HOptimizedGraphBuilder::Generate##Name,

// One entry per inlinable runtime function, indexed by
// function_id - Runtime::kFirstInlineFunction (see VisitCallRuntime).
const HOptimizedGraphBuilder::InlineFunctionGenerator
    HOptimizedGraphBuilder::kInlineFunctionGenerators[] = {
        INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
        INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
};
#undef INLINE_FUNCTION_GENERATOR_ADDRESS


// Builds the graph for a %RuntimeFunction(...) call: either dispatches to a
// dedicated inline generator (for %_Intrinsics) or emits a generic
// HCallRuntime instruction.
void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  if (expr->is_jsruntime()) {
    return Bailout(kCallToAJavaScriptRuntimeFunction);
  }

  const Runtime::Function* function = expr->function();
  ASSERT(function != NULL);
  if (function->intrinsic_type == Runtime::INLINE) {
    // Inline intrinsics are spelled with a leading underscore (%_Foo).
    ASSERT(expr->name()->length() > 0);
    ASSERT(expr->name()->Get(0) == '_');
    // Call to an inline function.
    int lookup_index = static_cast<int>(function->function_id) -
        static_cast<int>(Runtime::kFirstInlineFunction);
    ASSERT(lookup_index >= 0);
    ASSERT(static_cast<size_t>(lookup_index) <
           ARRAY_SIZE(kInlineFunctionGenerators));
    InlineFunctionGenerator generator = kInlineFunctionGenerators[lookup_index];

    // Call the inline code generator using the pointer-to-member.
    (this->*generator)(expr);
  } else {
    ASSERT(function->intrinsic_type == Runtime::RUNTIME);
    CHECK_ALIVE(VisitArgumentList(expr->arguments()));

    Handle<String> name = expr->name();
    int argument_count = expr->arguments()->length();
    HCallRuntime* call = New<HCallRuntime>(name, function,
                                           argument_count);
    Drop(argument_count);
    return ast_context()->ReturnInstruction(call, expr->id());
  }
}


// Dispatches a unary operation to the dedicated visitor for its token.
void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  switch (expr->op()) {
    case Token::DELETE: return VisitDelete(expr);
    case Token::VOID: return VisitVoid(expr);
    case Token::TYPEOF: return VisitTypeof(expr);
    case Token::NOT: return VisitNot(expr);
    default: UNREACHABLE();
  }
}


// Builds the graph for a 'delete' expression.  Property deletes call the
// DELETE builtin; variable deletes fold to a constant or bail out.
void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
  Property* prop = expr->expression()->AsProperty();
  VariableProxy* proxy = expr->expression()->AsVariableProxy();
  if (prop != NULL) {
    CHECK_ALIVE(VisitForValue(prop->obj()));
    CHECK_ALIVE(VisitForValue(prop->key()));
    HValue* key = Pop();
    HValue* obj = Pop();
    HValue* function = AddLoadJSBuiltin(Builtins::DELETE);
    Add<HPushArgument>(obj);
    Add<HPushArgument>(key);
    Add<HPushArgument>(Add<HConstant>(function_strict_mode_flag()));
    // TODO(olivf) InvokeFunction produces a check for the parameter count,
    // even though we are certain to pass the correct number of arguments here.
    HInstruction* instr = New<HInvokeFunction>(function, 3);
    return ast_context()->ReturnInstruction(instr, expr->id());
  } else if (proxy != NULL) {
    Variable* var = proxy->var();
    if (var->IsUnallocated()) {
      Bailout(kDeleteWithGlobalVariable);
    } else if (var->IsStackAllocated() || var->IsContextSlot()) {
      // Result of deleting non-global variables is false.  'this' is not
      // really a variable, though we implement it as one.  The
      // subexpression does not have side effects.
      HValue* value = var->is_this()
          ? graph()->GetConstantTrue()
          : graph()->GetConstantFalse();
      return ast_context()->ReturnValue(value);
    } else {
      Bailout(kDeleteWithNonGlobalVariable);
    }
  } else {
    // Result of deleting non-property, non-variable reference is true.
    // Evaluate the subexpression for side effects.
    CHECK_ALIVE(VisitForEffect(expr->expression()));
    return ast_context()->ReturnValue(graph()->GetConstantTrue());
  }
}


// 'void expr' evaluates the subexpression for effect and yields undefined.
void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) {
  CHECK_ALIVE(VisitForEffect(expr->expression()));
  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
}


// Builds the graph for a 'typeof' expression.
void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
  CHECK_ALIVE(VisitForTypeOf(expr->expression()));
  HValue* value = Pop();
  HValue* context = environment()->context();
  HInstruction* instr = new(zone()) HTypeof(context, value);
  return ast_context()->ReturnInstruction(instr, expr->id());
}


// Builds the graph for logical not.  In a test context the branch targets
// are simply swapped; in a value context the boolean result is materialized
// through a diamond of constant-pushing blocks.
void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
  if (ast_context()->IsTest()) {
    TestContext* context = TestContext::cast(ast_context());
    // Negation in a test context: swap the true/false targets.
    VisitForControl(expr->expression(),
                    context->if_false(),
                    context->if_true());
    return;
  }

  if (ast_context()->IsEffect()) {
    VisitForEffect(expr->expression());
    return;
  }

  ASSERT(ast_context()->IsValue());
  // Note the swapped order: the subexpression's true edge materializes
  // false, and vice versa.
  HBasicBlock* materialize_false = graph()->CreateBasicBlock();
  HBasicBlock* materialize_true = graph()->CreateBasicBlock();
  CHECK_BAILOUT(VisitForControl(expr->expression(),
                                materialize_false,
                                materialize_true));

  if (materialize_false->HasPredecessor()) {
    materialize_false->SetJoinId(expr->MaterializeFalseId());
    set_current_block(materialize_false);
    Push(graph()->GetConstantFalse());
  } else {
    materialize_false = NULL;
  }

  if (materialize_true->HasPredecessor()) {
    materialize_true->SetJoinId(expr->MaterializeTrueId());
    set_current_block(materialize_true);
    Push(graph()->GetConstantTrue());
  } else {
    materialize_true = NULL;
  }

  HBasicBlock* join =
      CreateJoin(materialize_false, materialize_true, expr->id());
  set_current_block(join);
  if (join != NULL) return ast_context()->ReturnValue(Pop());
}


// Emits the add/subtract-one for a count operation (++/--).  The operand is
// on top of the expression stack; when |returns_original_input| is set, an
// explicit ToNumber of the input is pushed first so postfix operations can
// return it.
HInstruction* HOptimizedGraphBuilder::BuildIncrement(
    bool returns_original_input,
    CountOperation* expr) {
  // The input to the count operation is on top of the expression stack.
  TypeInfo info = expr->type();
  Representation rep = Representation::FromType(info);
  if (rep.IsNone() || rep.IsTagged()) {
    rep = Representation::Smi();
  }

  if (returns_original_input) {
    // We need an explicit HValue representing ToNumber(input).  The
    // actual HChange instruction we need is (sometimes) added in a later
    // phase, so it is not available now to be used as an input to HAdd and
    // as the return value.
    HInstruction* number_input = Add<HForceRepresentation>(Pop(), rep);
    if (!rep.IsDouble()) {
      number_input->SetFlag(HInstruction::kFlexibleRepresentation);
      number_input->SetFlag(HInstruction::kCannotBeTagged);
    }
    Push(number_input);
  }

  // The addition has no side effects, so we do not need
  // to simulate the expression stack after this instruction.
  // Any later failures deopt to the load of the input or earlier.
  HConstant* delta = (expr->op() == Token::INC)
      ? graph()->GetConstant1()
      : graph()->GetConstantMinus1();
  HInstruction* instr = Add<HAdd>(Top(), delta);
  instr->SetFlag(HInstruction::kCannotBeTagged);
  instr->ClearAllSideEffects();
  return instr;
}


// Builds the graph for ++/-- on a variable or property.  Handles prefix and
// postfix forms; for postfix in a non-effect context the pre-increment value
// is kept on the stack as the expression's result.
void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Expression* target = expr->expression();
  VariableProxy* proxy = target->AsVariableProxy();
  Property* prop = target->AsProperty();
  if (proxy == NULL && prop == NULL) {
    return Bailout(kInvalidLhsInCountOperation);
  }

  // Match the full code generator stack by simulating an extra stack
  // element for postfix operations in a non-effect context.  The return
  // value is ToNumber(input).
  bool returns_original_input =
      expr->is_postfix() && !ast_context()->IsEffect();
  HValue* input = NULL;  // ToNumber(original_input).
  HValue* after = NULL;  // The result after incrementing or decrementing.

  if (proxy != NULL) {
    Variable* var = proxy->var();
    if (var->mode() == CONST)  {
      return Bailout(kUnsupportedCountOperationWithConst);
    }
    // Argument of the count operation is a variable, not a property.
    ASSERT(prop == NULL);
    CHECK_ALIVE(VisitForValue(target));

    after = BuildIncrement(returns_original_input, expr);
    input = returns_original_input ? Top() : Pop();
    Push(after);

    // Write the incremented value back to wherever the variable lives.
    switch (var->location()) {
      case Variable::UNALLOCATED:
        HandleGlobalVariableAssignment(var,
                                       after,
                                       expr->position(),
                                       expr->AssignmentId());
        break;

      case Variable::PARAMETER:
      case Variable::LOCAL:
        BindIfLive(var, after);
        break;

      case Variable::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function
        // using the arguments object.  We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots.  We have no direct
          // way to detect that the variable is a parameter so we use a
          // linear search of the parameter list.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
            ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
        HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
                                                          mode, after);
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        break;
      }

      case Variable::LOOKUP:
        return Bailout(kLookupVariableInCountOperation);
    }

  } else {
    // Argument of the count operation is a property.
    ASSERT(prop != NULL);

    if (prop->key()->IsPropertyName()) {
      // Named property.
      // Placeholder slot for the postfix result; replaced once the loaded
      // value is known.
      if (returns_original_input) Push(graph()->GetConstantUndefined());

      CHECK_ALIVE(VisitForValue(prop->obj()));
      HValue* object = Top();

      Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
      Handle<Map> map;
      HInstruction* load = NULL;
      bool monomorphic = prop->IsMonomorphic();
      SmallMapList* types = prop->GetReceiverTypes();
      if (monomorphic) {
        map = types->first();
        // Dictionary-mode receivers get no specialized load.
        if (map->is_dictionary_map()) monomorphic = false;
      }
      if (monomorphic) {
        Handle<JSFunction> getter;
        Handle<JSObject> holder;
        if (LookupGetter(map, name, &getter, &holder)) {
          load = BuildCallGetter(object, map, getter, holder);
        } else {
          load = BuildLoadNamedMonomorphic(object, name, prop, map);
        }
      } else if (types != NULL && types->length() > 1) {
        load = TryLoadPolymorphicAsMonomorphic(prop, object, types, name);
      }
      if (load == NULL) load = BuildLoadNamedGeneric(object, name, prop);
      PushAndAdd(load);
      if (load->HasObservableSideEffects()) {
        Add<HSimulate>(prop->LoadId(), REMOVABLE_SIMULATE);
      }

      after = BuildIncrement(returns_original_input, expr);
      HValue* result = returns_original_input ? Pop() : after;

      return BuildStoreNamed(expr, expr->id(), expr->position(),
                             expr->AssignmentId(), prop, object, after, result);
    } else {
      // Keyed property.
7620 if (returns_original_input) Push(graph()->GetConstantUndefined()); 7621 7622 CHECK_ALIVE(VisitForValue(prop->obj())); 7623 CHECK_ALIVE(VisitForValue(prop->key())); 7624 HValue* obj = environment()->ExpressionStackAt(1); 7625 HValue* key = environment()->ExpressionStackAt(0); 7626 7627 bool has_side_effects = false; 7628 HValue* load = HandleKeyedElementAccess( 7629 obj, key, NULL, prop, prop->LoadId(), RelocInfo::kNoPosition, 7630 false, // is_store 7631 &has_side_effects); 7632 Push(load); 7633 if (has_side_effects) Add<HSimulate>(prop->LoadId(), REMOVABLE_SIMULATE); 7634 7635 after = BuildIncrement(returns_original_input, expr); 7636 input = environment()->ExpressionStackAt(0); 7637 7638 HandleKeyedElementAccess(obj, key, after, expr, expr->AssignmentId(), 7639 RelocInfo::kNoPosition, 7640 true, // is_store 7641 &has_side_effects); 7642 7643 // Drop the key and the original value from the bailout environment. 7644 // Overwrite the receiver with the result of the operation, and the 7645 // placeholder with the original value if necessary. 7646 Drop(2); 7647 environment()->SetExpressionStackAt(0, after); 7648 if (returns_original_input) environment()->SetExpressionStackAt(1, input); 7649 ASSERT(has_side_effects); // Stores always have side effects. 7650 Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE); 7651 } 7652 } 7653 7654 Drop(returns_original_input ? 2 : 1); 7655 return ast_context()->ReturnValue(expr->is_postfix() ? 
input : after); 7656 } 7657 7658 7659 HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt( 7660 HValue* string, 7661 HValue* index) { 7662 if (string->IsConstant() && index->IsConstant()) { 7663 HConstant* c_string = HConstant::cast(string); 7664 HConstant* c_index = HConstant::cast(index); 7665 if (c_string->HasStringValue() && c_index->HasNumberValue()) { 7666 int32_t i = c_index->NumberValueAsInteger32(); 7667 Handle<String> s = c_string->StringValue(); 7668 if (i < 0 || i >= s->length()) { 7669 return New<HConstant>(OS::nan_value()); 7670 } 7671 return New<HConstant>(s->Get(i)); 7672 } 7673 } 7674 BuildCheckHeapObject(string); 7675 HValue* checkstring = 7676 AddInstruction(HCheckInstanceType::NewIsString(string, zone())); 7677 HInstruction* length = BuildLoadStringLength(string, checkstring); 7678 AddInstruction(length); 7679 HInstruction* checked_index = Add<HBoundsCheck>(index, length); 7680 return New<HStringCharCodeAt>(string, checked_index); 7681 } 7682 7683 7684 // Checks if the given shift amounts have form: (sa) and (32 - sa). 7685 static bool ShiftAmountsAllowReplaceByRotate(HValue* sa, 7686 HValue* const32_minus_sa) { 7687 if (!const32_minus_sa->IsSub()) return false; 7688 HSub* sub = HSub::cast(const32_minus_sa); 7689 if (sa != sub->right()) return false; 7690 HValue* const32 = sub->left(); 7691 if (!const32->IsConstant() || 7692 HConstant::cast(const32)->Integer32Value() != 32) { 7693 return false; 7694 } 7695 return (sub->right() == sa); 7696 } 7697 7698 7699 // Checks if the left and the right are shift instructions with the oposite 7700 // directions that can be replaced by one rotate right instruction or not. 7701 // Returns the operand and the shift amount for the rotate instruction in the 7702 // former case. 
bool HOptimizedGraphBuilder::MatchRotateRight(HValue* left,
                                              HValue* right,
                                              HValue** operand,
                                              HValue** shift_amount) {
  HShl* shl;
  HShr* shr;
  // Accept the shl/shr pair in either operand order of the bitwise OR.
  if (left->IsShl() && right->IsShr()) {
    shl = HShl::cast(left);
    shr = HShr::cast(right);
  } else if (left->IsShr() && right->IsShl()) {
    shl = HShl::cast(right);
    shr = HShr::cast(left);
  } else {
    return false;
  }
  // Both shifts must act on the same value.
  if (shl->left() != shr->left()) return false;

  // One shift amount must be (sa) and the other (32 - sa), in either role.
  if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
      !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
    return false;
  }
  // On success the rotate uses the right-shift's operand and amount.
  *operand = shr->left();
  *shift_amount = shr->right();
  return true;
}


// Returns false only when |right| is a constant whose low five bits (the
// effective shift amount) are non-zero; conservatively true otherwise.
bool CanBeZero(HValue* right) {
  if (right->IsConstant()) {
    HConstant* right_const = HConstant::cast(right);
    if (right_const->HasInteger32Value() &&
        (right_const->Integer32Value() & 0x1f) != 0) {
      return false;
    }
  }
  return true;
}


// Replaces a constant operand by its truncated-to-number constant when
// possible, updating |*expected| to Type::Number.  Non-constants (and
// constants that cannot be truncated) are returned unchanged.
HValue* HGraphBuilder::TruncateToNumber(HValue* value, Handle<Type>* expected) {
  if (value->IsConstant()) {
    HConstant* constant = HConstant::cast(value);
    Maybe<HConstant*> number = constant->CopyToTruncatedNumber(zone());
    if (number.has_value) {
      *expected = handle(Type::Number(), isolate());
      return AddInstruction(number.value);
    }
  }

  return value;
}


// Builds the Hydrogen instruction for a binary arithmetic/bitwise operation,
// using type-feedback bounds from the AST node to pick specialized forms
// (string add, rotate-right for matching shift pairs, uint32 shr tracking)
// and emitting soft deopts when feedback is missing.
HInstruction* HOptimizedGraphBuilder::BuildBinaryOperation(
    BinaryOperation* expr,
    HValue* left,
    HValue* right) {
  HValue* context = environment()->context();
  Handle<Type> left_type = expr->left()->bounds().lower;
  Handle<Type> right_type = expr->right()->bounds().lower;
  Handle<Type> result_type = expr->bounds().lower;
  Maybe<int> fixed_right_arg = expr->fixed_right_arg();
  Representation left_rep = Representation::FromType(left_type);
  Representation right_rep = Representation::FromType(right_type);
  Representation result_rep = Representation::FromType(result_type);

  if (expr->op() != Token::ADD ||
      (left->type().IsNonString() && right->type().IsNonString())) {
    // For addition we can only truncate the arguments to number if we can
    // prove that we will not end up in string concatenation mode.
    left = TruncateToNumber(left, &left_type);
    right = TruncateToNumber(right, &right_type);
  }

  if (left_type->Is(Type::None())) {
    Add<HDeoptimize>("Insufficient type feedback for left side",
                     Deoptimizer::SOFT);
    // TODO(rossberg): we should be able to get rid of non-continuous defaults.
    left_type = handle(Type::Any(), isolate());
  }
  if (right_type->Is(Type::None())) {
    Add<HDeoptimize>("Insufficient type feedback for right side",
                     Deoptimizer::SOFT);
    right_type = handle(Type::Any(), isolate());
  }
  HInstruction* instr = NULL;
  switch (expr->op()) {
    case Token::ADD:
      if (left_type->Is(Type::String()) && right_type->Is(Type::String())) {
        // Both sides known strings: check and emit a string concatenation.
        BuildCheckHeapObject(left);
        AddInstruction(HCheckInstanceType::NewIsString(left, zone()));
        BuildCheckHeapObject(right);
        AddInstruction(HCheckInstanceType::NewIsString(right, zone()));
        instr = HStringAdd::New(zone(), context, left, right);
      } else {
        instr = HAdd::New(zone(), context, left, right);
      }
      break;
    case Token::SUB:
      instr = HSub::New(zone(), context, left, right);
      break;
    case Token::MUL:
      instr = HMul::New(zone(), context, left, right);
      break;
    case Token::MOD:
      instr = HMod::New(zone(), context, left, right, fixed_right_arg);
      break;
    case Token::DIV:
      instr = HDiv::New(zone(), context, left, right);
      break;
    case Token::BIT_XOR:
    case Token::BIT_AND:
      instr = NewUncasted<HBitwise>(expr->op(), left, right);
      break;
    case Token::BIT_OR: {
      // (x << a) | (x >> (32 - a)) on int32 operands becomes a rotate.
      HValue* operand, *shift_amount;
      if (left_type->Is(Type::Signed32()) &&
          right_type->Is(Type::Signed32()) &&
          MatchRotateRight(left, right, &operand, &shift_amount)) {
        instr = new(zone()) HRor(context, operand, shift_amount);
      } else {
        instr = NewUncasted<HBitwise>(expr->op(), left, right);
      }
      break;
    }
    case Token::SAR:
      instr = HSar::New(zone(), context, left, right);
      break;
    case Token::SHR:
      instr = HShr::New(zone(), context, left, right);
      // A >>> whose shift amount may be zero can produce a uint32 result;
      // record it for the later uint32 analysis phase.
      if (FLAG_opt_safe_uint32_operations && instr->IsShr() &&
          CanBeZero(right)) {
        graph()->RecordUint32Instruction(instr);
      }
      break;
    case Token::SHL:
      instr = HShl::New(zone(), context, left, right);
      break;
    default:
      UNREACHABLE();
  }

  if (instr->IsBinaryOperation()) {
    HBinaryOperation* binop = HBinaryOperation::cast(instr);
    binop->set_observed_input_representation(1, left_rep);
    binop->set_observed_input_representation(2, right_rep);
    binop->initialize_output_representation(result_rep);
  }
  return instr;
}


// Check for the form (%_ClassOf(foo) === 'BarClass').
static bool IsClassOfTest(CompareOperation* expr) {
  if (expr->op() != Token::EQ_STRICT) return false;
  CallRuntime* call = expr->left()->AsCallRuntime();
  if (call == NULL) return false;
  Literal* literal = expr->right()->AsLiteral();
  if (literal == NULL) return false;
  if (!literal->value()->IsString()) return false;
  if (!call->name()->IsOneByteEqualTo(STATIC_ASCII_VECTOR("_ClassOf"))) {
    return false;
  }
  ASSERT(call->arguments()->length() == 1);
  return true;
}


// Dispatches a binary operation to the comma, logical, or arithmetic
// translation depending on the operator.
void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  switch (expr->op()) {
    case Token::COMMA:
      return VisitComma(expr);
    case Token::OR:
    case Token::AND:
      return VisitLogicalExpression(expr);
    default:
      return VisitArithmeticExpression(expr);
  }
}


// The comma operator: left side only for effect, right side in the
// context of the whole expression.
void HOptimizedGraphBuilder::VisitComma(BinaryOperation* expr) {
  CHECK_ALIVE(VisitForEffect(expr->left()));
  // Visit the right subexpression in the same AST context as the entire
  // expression.
  Visit(expr->right());
}


// Translates && and || with short-circuit control flow.  The shape differs
// by AST context: test contexts thread the branch targets directly, value
// contexts materialize the left value (with constant folding) and join, and
// effect contexts only need the left side's control flow.
void HOptimizedGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  if (ast_context()->IsTest()) {
    TestContext* context = TestContext::cast(ast_context());
    // Translate left subexpression.
    HBasicBlock* eval_right = graph()->CreateBasicBlock();
    if (is_logical_and) {
      CHECK_BAILOUT(VisitForControl(expr->left(),
                                    eval_right,
                                    context->if_false()));
    } else {
      CHECK_BAILOUT(VisitForControl(expr->left(),
                                    context->if_true(),
                                    eval_right));
    }

    // Translate right subexpression by visiting it in the same AST
    // context as the entire expression.
    if (eval_right->HasPredecessor()) {
      eval_right->SetJoinId(expr->RightId());
      set_current_block(eval_right);
      Visit(expr->right());
    }

  } else if (ast_context()->IsValue()) {
    CHECK_ALIVE(VisitForValue(expr->left()));
    ASSERT(current_block() != NULL);
    HValue* left_value = Top();

    if (left_value->IsConstant()) {
      HConstant* left_constant = HConstant::cast(left_value);
      // Constant left side: only evaluate the right side when the left
      // side does not already decide the result.
      if ((is_logical_and && left_constant->BooleanValue()) ||
          (!is_logical_and && !left_constant->BooleanValue())) {
        Drop(1);  // left_value.
        CHECK_ALIVE(VisitForValue(expr->right()));
      }
      return ast_context()->ReturnValue(Pop());
    }

    // We need an extra block to maintain edge-split form.
    HBasicBlock* empty_block = graph()->CreateBasicBlock();
    HBasicBlock* eval_right = graph()->CreateBasicBlock();
    ToBooleanStub::Types expected(expr->left()->to_boolean_types());
    HBranch* test = is_logical_and
        ? new(zone()) HBranch(left_value, expected, eval_right, empty_block)
        : new(zone()) HBranch(left_value, expected, empty_block, eval_right);
    current_block()->Finish(test);

    set_current_block(eval_right);
    Drop(1);  // Value of the left subexpression.
    CHECK_BAILOUT(VisitForValue(expr->right()));

    HBasicBlock* join_block =
        CreateJoin(empty_block, current_block(), expr->id());
    set_current_block(join_block);
    return ast_context()->ReturnValue(Pop());

  } else {
    ASSERT(ast_context()->IsEffect());
    // In an effect context, we don't need the value of the left subexpression,
    // only its control flow and side effects.  We need an extra block to
    // maintain edge-split form.
    HBasicBlock* empty_block = graph()->CreateBasicBlock();
    HBasicBlock* right_block = graph()->CreateBasicBlock();
    if (is_logical_and) {
      CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
    } else {
      CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
    }

    // TODO(kmillikin): Find a way to fix this.  It's ugly that there are
    // actually two empty blocks (one here and one inserted by
    // TestContext::BuildBranch), that they both have an HSimulate though the
    // second one is not a merge node, and that we really have no good AST ID
    // to put on that first HSimulate.

    if (empty_block->HasPredecessor()) {
      empty_block->SetJoinId(expr->id());
    } else {
      empty_block = NULL;
    }

    if (right_block->HasPredecessor()) {
      right_block->SetJoinId(expr->RightId());
      set_current_block(right_block);
      CHECK_BAILOUT(VisitForEffect(expr->right()));
      right_block = current_block();
    } else {
      right_block = NULL;
    }

    HBasicBlock* join_block =
        CreateJoin(empty_block, right_block, expr->id());
    set_current_block(join_block);
    // We did not materialize any value in the predecessor environments,
    // so there is no need to handle it here.
  }
}


// Evaluates both operands for value and emits the combined instruction.
void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
  CHECK_ALIVE(VisitForValue(expr->left()));
  CHECK_ALIVE(VisitForValue(expr->right()));
  HValue* right = Pop();
  HValue* left = Pop();
  HInstruction* instr = BuildBinaryOperation(expr, left, right);
  instr->set_position(expr->position());
  return ast_context()->ReturnInstruction(instr, expr->id());
}


// Fast path for (typeof x === 'literal') comparisons: evaluates only the
// sub-expression and branches on the typeof result.
void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
                                                        Expression* sub_expr,
                                                        Handle<String> check) {
  CHECK_ALIVE(VisitForTypeOf(sub_expr));
  HValue* value = Pop();
  HTypeofIsAndBranch* instr = new(zone()) HTypeofIsAndBranch(value, check);
  instr->set_position(expr->position());
  return ast_context()->ReturnControl(instr, expr->id());
}


// True for a strict equality where one side is a boolean constant.
static bool IsLiteralCompareBool(HValue* left,
                                 Token::Value op,
                                 HValue* right) {
  return op == Token::EQ_STRICT &&
      ((left->IsConstant() && HConstant::cast(left)->handle()->IsBoolean()) ||
       (right->IsConstant() && HConstant::cast(right)->handle()->IsBoolean()));
}


// Translates a comparison.  Tries literal special cases first (typeof,
// undefined/null, %_ClassOf, boolean constants), then instanceof/in, and
// finally feedback-driven object, internalized-string, numeric, or generic
// comparisons.
void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());

  // Check for a few fast cases. The AST visiting behavior must be in sync
  // with the full codegen: We don't push both left and right values onto
  // the expression stack when one side is a special-case literal.
  Expression* sub_expr = NULL;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    return HandleLiteralCompareTypeof(expr, sub_expr, check);
  }
  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
    return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
  }
  if (expr->IsLiteralCompareNull(&sub_expr)) {
    return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
  }

  if (IsClassOfTest(expr)) {
    CallRuntime* call = expr->left()->AsCallRuntime();
    ASSERT(call->arguments()->length() == 1);
    CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
    HValue* value = Pop();
    Literal* literal = expr->right()->AsLiteral();
    Handle<String> rhs = Handle<String>::cast(literal->value());
    HClassOfTestAndBranch* instr =
        new(zone()) HClassOfTestAndBranch(value, rhs);
    instr->set_position(expr->position());
    return ast_context()->ReturnControl(instr, expr->id());
  }

  Handle<Type> left_type = expr->left()->bounds().lower;
  Handle<Type> right_type = expr->right()->bounds().lower;
  Handle<Type> combined_type = expr->combined_type();
  Representation combined_rep = Representation::FromType(combined_type);
  Representation left_rep = Representation::FromType(left_type);
  Representation right_rep = Representation::FromType(right_type);

  CHECK_ALIVE(VisitForValue(expr->left()));
  CHECK_ALIVE(VisitForValue(expr->right()));

  HValue* context = environment()->context();
  HValue* right = Pop();
  HValue* left = Pop();
  Token::Value op = expr->op();

  if (IsLiteralCompareBool(left, op, right)) {
    // Strict equality against a boolean constant reduces to reference
    // equality (booleans are singletons).
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
    result->set_position(expr->position());
    return ast_context()->ReturnControl(result, expr->id());
  }

  if (op == Token::INSTANCEOF) {
    // Check to see if the rhs of the instanceof is a global function not
    // residing in new space. If it is we assume that the function will stay
    // the same.
    Handle<JSFunction> target = Handle<JSFunction>::null();
    VariableProxy* proxy = expr->right()->AsVariableProxy();
    bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated();
    if (global_function &&
        current_info()->has_global_object() &&
        !current_info()->global_object()->IsAccessCheckNeeded()) {
      Handle<String> name = proxy->name();
      Handle<GlobalObject> global(current_info()->global_object());
      LookupResult lookup(isolate());
      global->Lookup(*name, &lookup);
      if (lookup.IsNormal() && lookup.GetValue()->IsJSFunction()) {
        Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
        // If the function is in new space we assume it's more likely to
        // change and thus prefer the general IC code.
        if (!isolate()->heap()->InNewSpace(*candidate)) {
          target = candidate;
        }
      }
    }

    // If the target is not null we have found a known global function that is
    // assumed to stay the same for this instanceof.
    if (target.is_null()) {
      HInstanceOf* result = new(zone()) HInstanceOf(context, left, right);
      result->set_position(expr->position());
      return ast_context()->ReturnInstruction(result, expr->id());
    } else {
      Add<HCheckFunction>(right, target);
      HInstanceOfKnownGlobal* result =
          new(zone()) HInstanceOfKnownGlobal(context, left, target);
      result->set_position(expr->position());
      return ast_context()->ReturnInstruction(result, expr->id());
    }

    // Code below assumes that we don't fall through.
    UNREACHABLE();
  } else if (op == Token::IN) {
    HValue* function = AddLoadJSBuiltin(Builtins::IN);
    Add<HPushArgument>(left);
    Add<HPushArgument>(right);
    // TODO(olivf) InvokeFunction produces a check for the parameter count,
    // even though we are certain to pass the correct number of arguments here.
    HInstruction* result = new(zone()) HInvokeFunction(context, function, 2);
    result->set_position(expr->position());
    return ast_context()->ReturnInstruction(result, expr->id());
  }

  // Cases handled below depend on collected type feedback. They should
  // soft deoptimize when there is no type feedback.
  if (combined_type->Is(Type::None())) {
    Add<HDeoptimize>("insufficient type feedback for combined type",
                     Deoptimizer::SOFT);
    combined_type = left_type = right_type = handle(Type::Any(), isolate());
  }

  if (combined_type->Is(Type::Receiver())) {
    switch (op) {
      case Token::EQ:
      case Token::EQ_STRICT: {
        // Can we get away with map check and not instance type check?
        if (combined_type->IsClass()) {
          Handle<Map> map = combined_type->AsClass();
          AddCheckMap(left, map);
          AddCheckMap(right, map);
          HCompareObjectEqAndBranch* result =
              New<HCompareObjectEqAndBranch>(left, right);
          result->set_position(expr->position());
          return ast_context()->ReturnControl(result, expr->id());
        } else {
          BuildCheckHeapObject(left);
          AddInstruction(HCheckInstanceType::NewIsSpecObject(left, zone()));
          BuildCheckHeapObject(right);
          AddInstruction(HCheckInstanceType::NewIsSpecObject(right, zone()));
          HCompareObjectEqAndBranch* result =
              new(zone()) HCompareObjectEqAndBranch(left, right);
          result->set_position(expr->position());
          return ast_context()->ReturnControl(result, expr->id());
        }
      }
      default:
        return Bailout(kUnsupportedNonPrimitiveCompare);
    }
  } else if (combined_type->Is(Type::InternalizedString()) &&
             Token::IsEqualityOp(op)) {
    // Internalized strings are unique, so equality is reference equality.
    BuildCheckHeapObject(left);
    AddInstruction(HCheckInstanceType::NewIsInternalizedString(left, zone()));
    BuildCheckHeapObject(right);
    AddInstruction(HCheckInstanceType::NewIsInternalizedString(right, zone()));
    HCompareObjectEqAndBranch* result =
        new(zone()) HCompareObjectEqAndBranch(left, right);
    result->set_position(expr->position());
    return ast_context()->ReturnControl(result, expr->id());
  } else {
    if (combined_rep.IsTagged() || combined_rep.IsNone()) {
      HCompareGeneric* result =
          new(zone()) HCompareGeneric(context, left, right, op);
      result->set_observed_input_representation(1, left_rep);
      result->set_observed_input_representation(2, right_rep);
      result->set_position(expr->position());
      return ast_context()->ReturnInstruction(result, expr->id());
    } else {
      HCompareNumericAndBranch* result =
          new(zone()) HCompareNumericAndBranch(left, right, op);
      result->set_observed_input_representation(left_rep, right_rep);
      result->set_position(expr->position());
      return ast_context()->ReturnControl(result, expr->id());
    }
  }
}


// Translates a comparison of |sub_expr| against null or undefined.  Strict
// equality reduces to a reference comparison against the corresponding
// constant; sloppy equality falls back to BuildCompareNil with the
// feedback-derived type.
void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
                                                     Expression* sub_expr,
                                                     NilValue nil) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  ASSERT(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
  CHECK_ALIVE(VisitForValue(sub_expr));
  HValue* value = Pop();
  HIfContinuation continuation;
  if (expr->op() == Token::EQ_STRICT) {
    IfBuilder if_nil(this);
    if_nil.If<HCompareObjectEqAndBranch>(
        value, (nil == kNullValue) ? graph()->GetConstantNull()
                                   : graph()->GetConstantUndefined());
    if_nil.Then();
    if_nil.Else();
    if_nil.CaptureContinuation(&continuation);
    return ast_context()->ReturnContinuation(&continuation, expr->id());
  }
  Handle<Type> type = expr->combined_type()->Is(Type::None())
      ? handle(Type::Any(), isolate_) : expr->combined_type();
  BuildCompareNil(value, type, expr->position(), &continuation);
  return ast_context()->ReturnContinuation(&continuation, expr->id());
}


HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
  // If we share optimized code between different closures, the
  // this-function is not a constant, except inside an inlined body.
  if (function_state()->outer() != NULL) {
    return New<HConstant>(
        function_state()->compilation_info()->closure());
  } else {
    return new(zone()) HThisFunction;
  }
}


// Allocates space for a boilerplate-based object/array literal and emits a
// deep copy of the boilerplate into it.  When pretenuring, double (data)
// payload and pointer payload are allocated separately; |data_size| and
// |pointer_size| size those allocations.
HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
    HValue* context,
    Handle<JSObject> boilerplate_object,
    Handle<JSObject> original_boilerplate_object,
    Handle<Object> allocation_site,
    int data_size,
    int pointer_size,
    AllocationSiteMode mode) {
  NoObservableSideEffectsScope no_effects(this);
  InstanceType instance_type = boilerplate_object->map()->instance_type();
  ASSERT(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);
  HType type = instance_type == JS_ARRAY_TYPE
      ? HType::JSArray() : HType::JSObject();
  HInstruction* target = NULL;
  HInstruction* data_target = NULL;

  if (isolate()->heap()->GetPretenureMode() == TENURED) {
    if (data_size != 0) {
      // The double backing store gets its own tenured allocation, marked as
      // free space until it is filled in.
      HValue* size_in_bytes = Add<HConstant>(data_size);
      data_target = Add<HAllocate>(size_in_bytes, HType::JSObject(), TENURED,
                                   FIXED_DOUBLE_ARRAY_TYPE);
      Handle<Map> free_space_map = isolate()->factory()->free_space_map();
      AddStoreMapConstant(data_target, free_space_map);
      HObjectAccess access =
          HObjectAccess::ForJSObjectOffset(FreeSpace::kSizeOffset);
      Add<HStoreNamedField>(data_target, access, size_in_bytes);
    }
    if (pointer_size != 0) {
      HValue* size_in_bytes = Add<HConstant>(pointer_size);
      target = Add<HAllocate>(size_in_bytes, type, TENURED, instance_type);
    }
  } else {
    // In new space everything fits into one allocation.
    HValue* size_in_bytes = Add<HConstant>(data_size + pointer_size);
    target = Add<HAllocate>(size_in_bytes, type, NOT_TENURED, instance_type);
  }

  int offset = 0;
  int data_offset = 0;
  BuildEmitDeepCopy(boilerplate_object, original_boilerplate_object,
                    allocation_site, target, &offset, data_target,
                    &data_offset, mode);
  return target;
}
// Recursively emits a deep copy of |boilerplate_object| into the allocation
// |target|, advancing |*offset| (and |*data_offset| for the separate double
// backing store, when |data_target| is non-NULL).  Also emits an allocation
// memento when |mode| requests site tracking.
void HOptimizedGraphBuilder::BuildEmitDeepCopy(
    Handle<JSObject> boilerplate_object,
    Handle<JSObject> original_boilerplate_object,
    Handle<Object> allocation_site_object,
    HInstruction* target,
    int* offset,
    HInstruction* data_target,
    int* data_offset,
    AllocationSiteMode mode) {
  bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
      AllocationSite::CanTrack(boilerplate_object->map()->instance_type());

  // If using allocation sites, then the payload on the site should already
  // be filled in as a valid (boilerplate) array.
  ASSERT(!create_allocation_site_info ||
         AllocationSite::cast(*allocation_site_object)->IsLiteralSite());

  HInstruction* allocation_site = NULL;

  if (create_allocation_site_info) {
    allocation_site = Add<HConstant>(allocation_site_object);
  }

  // Only elements backing stores for non-COW arrays need to be copied.
  Handle<FixedArrayBase> elements(boilerplate_object->elements());
  Handle<FixedArrayBase> original_elements(
      original_boilerplate_object->elements());
  ElementsKind kind = boilerplate_object->map()->elements_kind();

  int object_offset = *offset;
  int object_size = boilerplate_object->map()->instance_size();
  int elements_size = (elements->length() > 0 &&
      elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
          elements->Size() : 0;
  int elements_offset = 0;

  if (data_target != NULL && boilerplate_object->HasFastDoubleElements()) {
    // Double elements live in the separate data allocation.
    elements_offset = *data_offset;
    *data_offset += elements_size;
  } else {
    // Place elements right after this object.
    elements_offset = *offset + object_size;
    *offset += elements_size;
  }
  // Increase the offset so that subsequent objects end up right after this
  // object (and its elements if they are allocated in the same space).
  *offset += object_size;

  // Copy object elements if non-COW.
  HValue* object_elements = BuildEmitObjectHeader(boilerplate_object, target,
      data_target, object_offset, elements_offset, elements_size);
  if (object_elements != NULL) {
    BuildEmitElements(elements, original_elements, kind, object_elements,
        target, offset, data_target, data_offset);
  }

  // Copy in-object properties.
  if (boilerplate_object->map()->NumberOfFields() != 0) {
    HValue* object_properties =
        Add<HInnerAllocatedObject>(target, object_offset);
    BuildEmitInObjectProperties(boilerplate_object, original_boilerplate_object,
        object_properties, target, offset, data_target, data_offset);
  }

  // Create allocation site info.
  // NOTE(review): this condition re-evaluates exactly what
  // |create_allocation_site_info| computed above; the bool could be reused.
  if (mode == TRACK_ALLOCATION_SITE &&
      AllocationSite::CanTrack(boilerplate_object->map()->instance_type())) {
    elements_offset += AllocationMemento::kSize;
    *offset += AllocationMemento::kSize;
    BuildCreateAllocationMemento(target, JSArray::kSize, allocation_site);
  }
}


// Emits the object header for a copied boilerplate: map, elements pointer,
// (empty) properties pointer, and for arrays the length field.  Returns the
// inner-allocated elements store when a non-COW backing store will be
// copied, or NULL when the boilerplate's elements handle is reused.
HValue* HOptimizedGraphBuilder::BuildEmitObjectHeader(
    Handle<JSObject> boilerplate_object,
    HInstruction* target,
    HInstruction* data_target,
    int object_offset,
    int elements_offset,
    int elements_size) {
  ASSERT(boilerplate_object->properties()->length() == 0);
  HValue* result = NULL;

  HValue* object_header = Add<HInnerAllocatedObject>(target, object_offset);
  Handle<Map> boilerplate_object_map(boilerplate_object->map());
  AddStoreMapConstant(object_header, boilerplate_object_map);

  HInstruction* elements;
  if (elements_size == 0) {
    // No copy needed (empty or COW elements): point at the boilerplate's
    // existing backing store.
    Handle<Object> elements_field =
        Handle<Object>(boilerplate_object->elements(), isolate());
    elements = Add<HConstant>(elements_field);
  } else {
    if (data_target != NULL && boilerplate_object->HasFastDoubleElements()) {
      elements = Add<HInnerAllocatedObject>(data_target, elements_offset);
    } else {
      elements = Add<HInnerAllocatedObject>(target, elements_offset);
    }
    result = elements;
  }
  Add<HStoreNamedField>(object_header, HObjectAccess::ForElementsPointer(),
      elements);

  Handle<Object> properties_field =
      Handle<Object>(boilerplate_object->properties(), isolate());
  ASSERT(*properties_field == isolate()->heap()->empty_fixed_array());
  HInstruction* properties = Add<HConstant>(properties_field);
  HObjectAccess access = HObjectAccess::ForPropertiesPointer();
  Add<HStoreNamedField>(object_header, access, properties);

  if (boilerplate_object->IsJSArray()) {
    Handle<JSArray> boilerplate_array =
        Handle<JSArray>::cast(boilerplate_object);
    Handle<Object> length_field =
        Handle<Object>(boilerplate_array->length(), isolate());
    HInstruction* length = Add<HConstant>(length_field);

    ASSERT(boilerplate_array->length()->IsSmi());
    Add<HStoreNamedField>(object_header, HObjectAccess::ForArrayLength(
        boilerplate_array->GetElementsKind()), length);
  }

  return result;
}


// Copies the boilerplate's in-object FIELD properties into the target,
// recursing through BuildEmitDeepCopy for JSObject-valued fields.
// (Continues past the end of this excerpt.)
void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
    Handle<JSObject> boilerplate_object,
    Handle<JSObject> original_boilerplate_object,
    HValue* object_properties,
    HInstruction* target,
    int* offset,
    HInstruction* data_target,
    int* data_offset) {
  Handle<DescriptorArray> descriptors(
      boilerplate_object->map()->instance_descriptors());
  int limit = boilerplate_object->map()->NumberOfOwnDescriptors();

  int copied_fields = 0;
  for (int i = 0; i < limit; i++) {
    PropertyDetails details = descriptors->GetDetails(i);
    if (details.type() != FIELD) continue;
    copied_fields++;
    int index = descriptors->GetFieldIndex(i);
    int property_offset = boilerplate_object->GetInObjectPropertyOffset(index);
    Handle<Name> name(descriptors->GetKey(i));
    Handle<Object> value =
        Handle<Object>(boilerplate_object->InObjectPropertyAt(index),
        isolate());

    // The access for the store depends on the type of the boilerplate.
    HObjectAccess access = boilerplate_object->IsJSArray() ?
        HObjectAccess::ForJSArrayOffset(property_offset) :
        HObjectAccess::ForJSObjectOffset(property_offset);

    if (value->IsJSObject()) {
      // Nested object literal: allocate it inside the same target and
      // deep-copy it (allocation-site tracking off for nested copies).
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      Handle<JSObject> original_value_object = Handle<JSObject>::cast(
          Handle<Object>(original_boilerplate_object->InObjectPropertyAt(index),
              isolate()));
      HInstruction* value_instruction = Add<HInnerAllocatedObject>(target,
          *offset);

      Add<HStoreNamedField>(object_properties, access, value_instruction);
      BuildEmitDeepCopy(value_object, original_value_object,
                        Handle<Object>::null(), target,
                        offset, data_target, data_offset,
                        DONT_TRACK_ALLOCATION_SITE);
    } else {
      Representation representation = details.representation();
      HInstruction* value_instruction = Add<HConstant>(value);

      if (representation.IsDouble()) {
        // Allocate a HeapNumber box and store the value into it.
8455 HInstruction* double_box; 8456 if (data_target != NULL) { 8457 double_box = Add<HInnerAllocatedObject>(data_target, *data_offset); 8458 *data_offset += HeapNumber::kSize; 8459 } else { 8460 double_box = Add<HInnerAllocatedObject>(target, *offset); 8461 *offset += HeapNumber::kSize; 8462 } 8463 AddStoreMapConstant(double_box, 8464 isolate()->factory()->heap_number_map()); 8465 Add<HStoreNamedField>(double_box, HObjectAccess::ForHeapNumberValue(), 8466 value_instruction); 8467 value_instruction = double_box; 8468 } 8469 8470 Add<HStoreNamedField>(object_properties, access, value_instruction); 8471 } 8472 } 8473 8474 int inobject_properties = boilerplate_object->map()->inobject_properties(); 8475 HInstruction* value_instruction = 8476 Add<HConstant>(isolate()->factory()->one_pointer_filler_map()); 8477 for (int i = copied_fields; i < inobject_properties; i++) { 8478 ASSERT(boilerplate_object->IsJSObject()); 8479 int property_offset = boilerplate_object->GetInObjectPropertyOffset(i); 8480 HObjectAccess access = HObjectAccess::ForJSObjectOffset(property_offset); 8481 Add<HStoreNamedField>(object_properties, access, value_instruction); 8482 } 8483 } 8484 8485 8486 void HOptimizedGraphBuilder::BuildEmitElements( 8487 Handle<FixedArrayBase> elements, 8488 Handle<FixedArrayBase> original_elements, 8489 ElementsKind kind, 8490 HValue* object_elements, 8491 HInstruction* target, 8492 int* offset, 8493 HInstruction* data_target, 8494 int* data_offset) { 8495 int elements_length = elements->length(); 8496 HValue* object_elements_length = Add<HConstant>(elements_length); 8497 8498 BuildInitializeElementsHeader(object_elements, kind, object_elements_length); 8499 8500 // Copy elements backing store content. 
8501 if (elements->IsFixedDoubleArray()) { 8502 BuildEmitFixedDoubleArray(elements, kind, object_elements); 8503 } else if (elements->IsFixedArray()) { 8504 BuildEmitFixedArray(elements, original_elements, kind, object_elements, 8505 target, offset, data_target, data_offset); 8506 } else { 8507 UNREACHABLE(); 8508 } 8509 } 8510 8511 8512 void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray( 8513 Handle<FixedArrayBase> elements, 8514 ElementsKind kind, 8515 HValue* object_elements) { 8516 HInstruction* boilerplate_elements = Add<HConstant>(elements); 8517 int elements_length = elements->length(); 8518 for (int i = 0; i < elements_length; i++) { 8519 HValue* key_constant = Add<HConstant>(i); 8520 HInstruction* value_instruction = 8521 Add<HLoadKeyed>(boilerplate_elements, key_constant, 8522 static_cast<HValue*>(NULL), kind, 8523 ALLOW_RETURN_HOLE); 8524 HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant, 8525 value_instruction, kind); 8526 store->SetFlag(HValue::kAllowUndefinedAsNaN); 8527 } 8528 } 8529 8530 8531 void HOptimizedGraphBuilder::BuildEmitFixedArray( 8532 Handle<FixedArrayBase> elements, 8533 Handle<FixedArrayBase> original_elements, 8534 ElementsKind kind, 8535 HValue* object_elements, 8536 HInstruction* target, 8537 int* offset, 8538 HInstruction* data_target, 8539 int* data_offset) { 8540 HInstruction* boilerplate_elements = Add<HConstant>(elements); 8541 int elements_length = elements->length(); 8542 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements); 8543 Handle<FixedArray> original_fast_elements = 8544 Handle<FixedArray>::cast(original_elements); 8545 for (int i = 0; i < elements_length; i++) { 8546 Handle<Object> value(fast_elements->get(i), isolate()); 8547 HValue* key_constant = Add<HConstant>(i); 8548 if (value->IsJSObject()) { 8549 Handle<JSObject> value_object = Handle<JSObject>::cast(value); 8550 Handle<JSObject> original_value_object = Handle<JSObject>::cast( 8551 
Handle<Object>(original_fast_elements->get(i), isolate())); 8552 HInstruction* value_instruction = Add<HInnerAllocatedObject>(target, 8553 *offset); 8554 Add<HStoreKeyed>(object_elements, key_constant, value_instruction, kind); 8555 BuildEmitDeepCopy(value_object, original_value_object, 8556 Handle<Object>::null(), target, 8557 offset, data_target, data_offset, 8558 DONT_TRACK_ALLOCATION_SITE); 8559 } else { 8560 HInstruction* value_instruction = 8561 Add<HLoadKeyed>(boilerplate_elements, key_constant, 8562 static_cast<HValue*>(NULL), kind, 8563 ALLOW_RETURN_HOLE); 8564 Add<HStoreKeyed>(object_elements, key_constant, value_instruction, kind); 8565 } 8566 } 8567 } 8568 8569 8570 void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) { 8571 ASSERT(!HasStackOverflow()); 8572 ASSERT(current_block() != NULL); 8573 ASSERT(current_block()->HasPredecessor()); 8574 HInstruction* instr = BuildThisFunction(); 8575 return ast_context()->ReturnInstruction(instr, expr->id()); 8576 } 8577 8578 8579 void HOptimizedGraphBuilder::VisitDeclarations( 8580 ZoneList<Declaration*>* declarations) { 8581 ASSERT(globals_.is_empty()); 8582 AstVisitor::VisitDeclarations(declarations); 8583 if (!globals_.is_empty()) { 8584 Handle<FixedArray> array = 8585 isolate()->factory()->NewFixedArray(globals_.length(), TENURED); 8586 for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i)); 8587 int flags = DeclareGlobalsEvalFlag::encode(current_info()->is_eval()) | 8588 DeclareGlobalsNativeFlag::encode(current_info()->is_native()) | 8589 DeclareGlobalsLanguageMode::encode(current_info()->language_mode()); 8590 Add<HDeclareGlobals>(array, flags); 8591 globals_.Clear(); 8592 } 8593 } 8594 8595 8596 void HOptimizedGraphBuilder::VisitVariableDeclaration( 8597 VariableDeclaration* declaration) { 8598 VariableProxy* proxy = declaration->proxy(); 8599 VariableMode mode = declaration->mode(); 8600 Variable* variable = proxy->var(); 8601 bool hole_init = mode == CONST || mode == 
CONST_HARMONY || mode == LET; 8602 switch (variable->location()) { 8603 case Variable::UNALLOCATED: 8604 globals_.Add(variable->name(), zone()); 8605 globals_.Add(variable->binding_needs_init() 8606 ? isolate()->factory()->the_hole_value() 8607 : isolate()->factory()->undefined_value(), zone()); 8608 return; 8609 case Variable::PARAMETER: 8610 case Variable::LOCAL: 8611 if (hole_init) { 8612 HValue* value = graph()->GetConstantHole(); 8613 environment()->Bind(variable, value); 8614 } 8615 break; 8616 case Variable::CONTEXT: 8617 if (hole_init) { 8618 HValue* value = graph()->GetConstantHole(); 8619 HValue* context = environment()->context(); 8620 HStoreContextSlot* store = Add<HStoreContextSlot>( 8621 context, variable->index(), HStoreContextSlot::kNoCheck, value); 8622 if (store->HasObservableSideEffects()) { 8623 Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE); 8624 } 8625 } 8626 break; 8627 case Variable::LOOKUP: 8628 return Bailout(kUnsupportedLookupSlotInDeclaration); 8629 } 8630 } 8631 8632 8633 void HOptimizedGraphBuilder::VisitFunctionDeclaration( 8634 FunctionDeclaration* declaration) { 8635 VariableProxy* proxy = declaration->proxy(); 8636 Variable* variable = proxy->var(); 8637 switch (variable->location()) { 8638 case Variable::UNALLOCATED: { 8639 globals_.Add(variable->name(), zone()); 8640 Handle<SharedFunctionInfo> function = Compiler::BuildFunctionInfo( 8641 declaration->fun(), current_info()->script()); 8642 // Check for stack-overflow exception. 
8643 if (function.is_null()) return SetStackOverflow(); 8644 globals_.Add(function, zone()); 8645 return; 8646 } 8647 case Variable::PARAMETER: 8648 case Variable::LOCAL: { 8649 CHECK_ALIVE(VisitForValue(declaration->fun())); 8650 HValue* value = Pop(); 8651 BindIfLive(variable, value); 8652 break; 8653 } 8654 case Variable::CONTEXT: { 8655 CHECK_ALIVE(VisitForValue(declaration->fun())); 8656 HValue* value = Pop(); 8657 HValue* context = environment()->context(); 8658 HStoreContextSlot* store = Add<HStoreContextSlot>( 8659 context, variable->index(), HStoreContextSlot::kNoCheck, value); 8660 if (store->HasObservableSideEffects()) { 8661 Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE); 8662 } 8663 break; 8664 } 8665 case Variable::LOOKUP: 8666 return Bailout(kUnsupportedLookupSlotInDeclaration); 8667 } 8668 } 8669 8670 8671 void HOptimizedGraphBuilder::VisitModuleDeclaration( 8672 ModuleDeclaration* declaration) { 8673 UNREACHABLE(); 8674 } 8675 8676 8677 void HOptimizedGraphBuilder::VisitImportDeclaration( 8678 ImportDeclaration* declaration) { 8679 UNREACHABLE(); 8680 } 8681 8682 8683 void HOptimizedGraphBuilder::VisitExportDeclaration( 8684 ExportDeclaration* declaration) { 8685 UNREACHABLE(); 8686 } 8687 8688 8689 void HOptimizedGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) { 8690 UNREACHABLE(); 8691 } 8692 8693 8694 void HOptimizedGraphBuilder::VisitModuleVariable(ModuleVariable* module) { 8695 UNREACHABLE(); 8696 } 8697 8698 8699 void HOptimizedGraphBuilder::VisitModulePath(ModulePath* module) { 8700 UNREACHABLE(); 8701 } 8702 8703 8704 void HOptimizedGraphBuilder::VisitModuleUrl(ModuleUrl* module) { 8705 UNREACHABLE(); 8706 } 8707 8708 8709 void HOptimizedGraphBuilder::VisitModuleStatement(ModuleStatement* stmt) { 8710 UNREACHABLE(); 8711 } 8712 8713 8714 // Generators for inline runtime functions. 8715 // Support for types. 
// %_IsSmi(x): branch on whether the argument is a small integer.
void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HIsSmiAndBranch* result = new(zone()) HIsSmiAndBranch(value);
  return ast_context()->ReturnControl(result, call->id());
}


// %_IsSpecObject(x): branch on the spec-object instance-type range.
void HOptimizedGraphBuilder::GenerateIsSpecObject(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HHasInstanceTypeAndBranch* result =
      new(zone()) HHasInstanceTypeAndBranch(value,
                                            FIRST_SPEC_OBJECT_TYPE,
                                            LAST_SPEC_OBJECT_TYPE);
  return ast_context()->ReturnControl(result, call->id());
}


// %_IsFunction(x): branch on the JS_FUNCTION_TYPE instance type.
void HOptimizedGraphBuilder::GenerateIsFunction(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HHasInstanceTypeAndBranch* result =
      new(zone()) HHasInstanceTypeAndBranch(value, JS_FUNCTION_TYPE);
  return ast_context()->ReturnControl(result, call->id());
}


// %_HasCachedArrayIndex(x): branch on whether the string's hash field
// caches an array index.
void HOptimizedGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HHasCachedArrayIndexAndBranch* result =
      new(zone()) HHasCachedArrayIndexAndBranch(value);
  return ast_context()->ReturnControl(result, call->id());
}


// %_IsArray(x): branch on the JS_ARRAY_TYPE instance type.
void HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HHasInstanceTypeAndBranch* result =
      new(zone()) HHasInstanceTypeAndBranch(value, JS_ARRAY_TYPE);
  return ast_context()->ReturnControl(result, call->id());
}


// %_IsRegExp(x): branch on the JS_REGEXP_TYPE instance type.
void HOptimizedGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HHasInstanceTypeAndBranch* result =
      new(zone()) HHasInstanceTypeAndBranch(value, JS_REGEXP_TYPE);
  return ast_context()->ReturnControl(result, call->id());
}


// %_IsObject(x): branch on the "typeof x == 'object'" style object test.
void HOptimizedGraphBuilder::GenerateIsObject(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HIsObjectAndBranch* result = new(zone()) HIsObjectAndBranch(value);
  return ast_context()->ReturnControl(result, call->id());
}


// Not implemented in optimized code: fall back to full codegen.
void HOptimizedGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionIsNonNegativeSmi);
}


// %_IsUndetectableObject(x): branch on the map's undetectable bit.
void HOptimizedGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HIsUndetectableAndBranch* result =
      new(zone()) HIsUndetectableAndBranch(value);
  return ast_context()->ReturnControl(result, call->id());
}


// Not implemented in optimized code: fall back to full codegen.
void HOptimizedGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf);
}


// Support for construct call checks.
void HOptimizedGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 0);
  if (function_state()->outer() != NULL) {
    // We are generating graph for inlined function.  The answer is known
    // statically from the inlining kind.
    HValue* value = function_state()->inlining_kind() == CONSTRUCT_CALL_RETURN
        ? graph()->GetConstantTrue()
        : graph()->GetConstantFalse();
    return ast_context()->ReturnValue(value);
  } else {
    return ast_context()->ReturnControl(new(zone()) HIsConstructCallAndBranch,
                                        call->id());
  }
}


// Support for arguments.length and arguments[?].
void HOptimizedGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
  // Our implementation of arguments (based on this stack frame or an
  // adapter below it) does not work for inlined functions.  This runtime
  // function is blacklisted by AstNode::IsInlineable.
  ASSERT(function_state()->outer() == NULL);
  ASSERT(call->arguments()->length() == 0);
  HInstruction* elements = Add<HArgumentsElements>(false);
  HArgumentsLength* result = New<HArgumentsLength>(elements);
  return ast_context()->ReturnInstruction(result, call->id());
}


// %_Arguments(index): bounds-checked access into the arguments object.
void HOptimizedGraphBuilder::GenerateArguments(CallRuntime* call) {
  // Our implementation of arguments (based on this stack frame or an
  // adapter below it) does not work for inlined functions.  This runtime
  // function is blacklisted by AstNode::IsInlineable.
  ASSERT(function_state()->outer() == NULL);
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* index = Pop();
  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* length = Add<HArgumentsLength>(elements);
  HInstruction* checked_index = Add<HBoundsCheck>(index, length);
  HAccessArgumentsAt* result =
      new(zone()) HAccessArgumentsAt(elements, length, checked_index);
  return ast_context()->ReturnInstruction(result, call->id());
}


// Support for accessing the class and value fields of an object.
void HOptimizedGraphBuilder::GenerateClassOf(CallRuntime* call) {
  // The special form detected by IsClassOfTest is detected before we get here
  // and does not cause a bailout.
  return Bailout(kInlinedRuntimeFunctionClassOf);
}


// %_ValueOf(x): read the boxed value of a JSValue wrapper.
void HOptimizedGraphBuilder::GenerateValueOf(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HValueOf* result = new(zone()) HValueOf(value);
  return ast_context()->ReturnInstruction(result, call->id());
}


// %_DateField(date, index): read a cached field of a JSDate.  The index
// must be a Smi literal at the call site.
void HOptimizedGraphBuilder::GenerateDateField(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 2);
  ASSERT_NE(NULL, call->arguments()->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->value()));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* date = Pop();
  HDateField* result = new(zone()) HDateField(date, index);
  return ast_context()->ReturnInstruction(result, call->id());
}


// %_OneByteSeqStringSetChar(string, index, value): in-place char store
// into a sequential one-byte string.
void HOptimizedGraphBuilder::GenerateOneByteSeqStringSetChar(
    CallRuntime* call) {
  ASSERT(call->arguments()->length() == 3);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
  // Note: pops come off in reverse order of evaluation.
  HValue* value = Pop();
  HValue* index = Pop();
  HValue* string = Pop();
  HSeqStringSetChar* result = new(zone()) HSeqStringSetChar(
      String::ONE_BYTE_ENCODING, string, index, value);
  return ast_context()->ReturnInstruction(result, call->id());
}


// %_TwoByteSeqStringSetChar(string, index, value): in-place char store
// into a sequential two-byte string.
void HOptimizedGraphBuilder::GenerateTwoByteSeqStringSetChar(
    CallRuntime* call) {
  ASSERT(call->arguments()->length() == 3);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
  // Note: pops come off in reverse order of evaluation.
  HValue* value = Pop();
  HValue* index = Pop();
  HValue* string = Pop();
  HSeqStringSetChar* result = new(zone()) HSeqStringSetChar(
      String::TWO_BYTE_ENCODING, string, index, value);
  return ast_context()->ReturnInstruction(result, call->id());
}


// %_SetValueOf(object, value): if |object| is a JSValue wrapper, overwrite
// its boxed value; otherwise leave it alone.  Always evaluates to |value|.
// Builds an explicit smi-check/type-check diamond of basic blocks.
void HOptimizedGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 2);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  HValue* value = Pop();
  HValue* object = Pop();
  // Check if object is a not a smi.
  HIsSmiAndBranch* smicheck = new(zone()) HIsSmiAndBranch(object);
  HBasicBlock* if_smi = graph()->CreateBasicBlock();
  HBasicBlock* if_heap_object = graph()->CreateBasicBlock();
  HBasicBlock* join = graph()->CreateBasicBlock();
  smicheck->SetSuccessorAt(0, if_smi);
  smicheck->SetSuccessorAt(1, if_heap_object);
  current_block()->Finish(smicheck);
  if_smi->Goto(join);

  // Check if object is a JSValue.
  set_current_block(if_heap_object);
  HHasInstanceTypeAndBranch* typecheck =
      new(zone()) HHasInstanceTypeAndBranch(object, JS_VALUE_TYPE);
  HBasicBlock* if_js_value = graph()->CreateBasicBlock();
  HBasicBlock* not_js_value = graph()->CreateBasicBlock();
  typecheck->SetSuccessorAt(0, if_js_value);
  typecheck->SetSuccessorAt(1, not_js_value);
  current_block()->Finish(typecheck);
  not_js_value->Goto(join);

  // Create in-object property store to kValueOffset.
  set_current_block(if_js_value);
  Add<HStoreNamedField>(object,
      HObjectAccess::ForJSObjectOffset(JSValue::kValueOffset), value);
  if_js_value->Goto(join);
  join->SetJoinId(call->id());
  set_current_block(join);
  return ast_context()->ReturnValue(value);
}


// Fast support for charCodeAt(n).
// %_StringCharCodeAt(string, index): load the char code at |index|.
void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 2);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  // Pops come off in reverse order of evaluation: index first, then string.
  HValue* index = Pop();
  HValue* string = Pop();
  HInstruction* result = BuildStringCharCodeAt(string, index);
  return ast_context()->ReturnInstruction(result, call->id());
}


// Fast support for string.charAt(n) and string[n].
// %_StringCharFromCode(code): build a one-character string from a code.
void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* char_code = Pop();
  HInstruction* result = New<HStringCharFromCode>(char_code);
  return ast_context()->ReturnInstruction(result, call->id());
}


// Fast support for string.charAt(n) and string[n].
// %_StringCharAt(string, index): charCodeAt followed by charFromCode.
void HOptimizedGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 2);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  HValue* index = Pop();
  HValue* string = Pop();
  // The char-code load must be inserted into the graph before it is used.
  HInstruction* char_code = BuildStringCharCodeAt(string, index);
  AddInstruction(char_code);
  HInstruction* result = New<HStringCharFromCode>(char_code);
  return ast_context()->ReturnInstruction(result, call->id());
}


// Fast support for object equality testing.
// %_ObjectEquals(a, b): branch on reference equality of two objects.
void HOptimizedGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 2);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  // Pops come off in reverse order of evaluation.
  HValue* right = Pop();
  HValue* left = Pop();
  HCompareObjectEqAndBranch* result =
      New<HCompareObjectEqAndBranch>(left, right);
  return ast_context()->ReturnControl(result, call->id());
}


void HOptimizedGraphBuilder::GenerateLog(CallRuntime* call) {
  // %_Log is ignored in optimized code.
  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
}


// Fast support for Math.random().
void HOptimizedGraphBuilder::GenerateRandomHeapNumber(CallRuntime* call) {
  HGlobalObject* global_object = Add<HGlobalObject>();
  HRandom* result = new(zone()) HRandom(global_object);
  return ast_context()->ReturnInstruction(result, call->id());
}


// Fast support for StringAdd.
void HOptimizedGraphBuilder::GenerateStringAdd(CallRuntime* call) {
  ASSERT_EQ(2, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  // Pops come off in reverse order of evaluation.
  HValue* right = Pop();
  HValue* left = Pop();
  HValue* context = environment()->context();
  // STRING_ADD_CHECK_BOTH: both operands get string checks/conversions.
  HInstruction* result = HStringAdd::New(
      zone(), context, left, right, STRING_ADD_CHECK_BOTH);
  return ast_context()->ReturnInstruction(result, call->id());
}


// Fast support for SubString.
9027 void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) { 9028 ASSERT_EQ(3, call->arguments()->length()); 9029 CHECK_ALIVE(VisitArgumentList(call->arguments())); 9030 HValue* context = environment()->context(); 9031 HCallStub* result = new(zone()) HCallStub(context, CodeStub::SubString, 3); 9032 Drop(3); 9033 return ast_context()->ReturnInstruction(result, call->id()); 9034 } 9035 9036 9037 // Fast support for StringCompare. 9038 void HOptimizedGraphBuilder::GenerateStringCompare(CallRuntime* call) { 9039 ASSERT_EQ(2, call->arguments()->length()); 9040 CHECK_ALIVE(VisitArgumentList(call->arguments())); 9041 HValue* context = environment()->context(); 9042 HCallStub* result = 9043 new(zone()) HCallStub(context, CodeStub::StringCompare, 2); 9044 Drop(2); 9045 return ast_context()->ReturnInstruction(result, call->id()); 9046 } 9047 9048 9049 // Support for direct calls from JavaScript to native RegExp code. 9050 void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) { 9051 ASSERT_EQ(4, call->arguments()->length()); 9052 CHECK_ALIVE(VisitArgumentList(call->arguments())); 9053 HValue* context = environment()->context(); 9054 HCallStub* result = new(zone()) HCallStub(context, CodeStub::RegExpExec, 4); 9055 Drop(4); 9056 return ast_context()->ReturnInstruction(result, call->id()); 9057 } 9058 9059 9060 // Construct a RegExp exec result with two in-object properties. 9061 void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) { 9062 ASSERT_EQ(3, call->arguments()->length()); 9063 CHECK_ALIVE(VisitArgumentList(call->arguments())); 9064 HValue* context = environment()->context(); 9065 HCallStub* result = 9066 new(zone()) HCallStub(context, CodeStub::RegExpConstructResult, 3); 9067 Drop(3); 9068 return ast_context()->ReturnInstruction(result, call->id()); 9069 } 9070 9071 9072 // Support for fast native caches. 
// Not implemented in optimized code: fall back to full codegen.
void HOptimizedGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionGetFromCache);
}


// Fast support for number to string.
void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
  ASSERT_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitArgumentList(call->arguments()));
  HValue* context = environment()->context();
  HCallStub* result =
      new(zone()) HCallStub(context, CodeStub::NumberToString, 1);
  Drop(1);
  return ast_context()->ReturnInstruction(result, call->id());
}


// Fast call for custom callbacks: %_CallFunction(arg0, ..., argN, fn).
// Builds a type-check diamond: JSFunctions are invoked directly, anything
// else (e.g. function proxies) goes through the generic call path.
void HOptimizedGraphBuilder::GenerateCallFunction(CallRuntime* call) {
  // 1 ~ The function to call is not itself an argument to the call.
  int arg_count = call->arguments()->length() - 1;
  ASSERT(arg_count >= 1);  // There's always at least a receiver.

  for (int i = 0; i < arg_count; ++i) {
    CHECK_ALIVE(VisitArgument(call->arguments()->at(i)));
  }
  CHECK_ALIVE(VisitForValue(call->arguments()->last()));

  HValue* function = Pop();

  // Branch for function proxies, or other non-functions.
  HHasInstanceTypeAndBranch* typecheck =
      new(zone()) HHasInstanceTypeAndBranch(function, JS_FUNCTION_TYPE);
  HBasicBlock* if_jsfunction = graph()->CreateBasicBlock();
  HBasicBlock* if_nonfunction = graph()->CreateBasicBlock();
  HBasicBlock* join = graph()->CreateBasicBlock();
  typecheck->SetSuccessorAt(0, if_jsfunction);
  typecheck->SetSuccessorAt(1, if_nonfunction);
  current_block()->Finish(typecheck);

  // Direct invocation path for genuine JSFunctions.
  set_current_block(if_jsfunction);
  HInstruction* invoke_result = Add<HInvokeFunction>(function, arg_count);
  Drop(arg_count);
  Push(invoke_result);
  if_jsfunction->Goto(join);

  // Generic call path for everything else.
  set_current_block(if_nonfunction);
  HInstruction* call_result = Add<HCallFunction>(function, arg_count);
  Drop(arg_count);
  Push(call_result);
  if_nonfunction->Goto(join);

  set_current_block(join);
  join->SetJoinId(call->id());
  return ast_context()->ReturnValue(Pop());
}


// Fast call to math functions.
// %_MathPow(x, y): emit an HPower instruction.
void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) {
  ASSERT_EQ(2, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  // Pops come off in reverse order of evaluation.
  HValue* right = Pop();
  HValue* left = Pop();
  HInstruction* result = HPower::New(zone(), context(), left, right);
  return ast_context()->ReturnInstruction(result, call->id());
}


// %_MathSin(x): call the transcendental-cache stub with the SIN entry.
void HOptimizedGraphBuilder::GenerateMathSin(CallRuntime* call) {
  ASSERT_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitArgumentList(call->arguments()));
  HValue* context = environment()->context();
  HCallStub* result =
      new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
  result->set_transcendental_type(TranscendentalCache::SIN);
  Drop(1);
  return ast_context()->ReturnInstruction(result, call->id());
}


// %_MathCos(x): call the transcendental-cache stub with the COS entry.
void HOptimizedGraphBuilder::GenerateMathCos(CallRuntime* call) {
  ASSERT_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitArgumentList(call->arguments()));
  HValue* context = environment()->context();
  HCallStub* result =
      new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
  result->set_transcendental_type(TranscendentalCache::COS);
  Drop(1);
  return ast_context()->ReturnInstruction(result, call->id());
}


// %_MathTan(x): call the transcendental-cache stub with the TAN entry.
void HOptimizedGraphBuilder::GenerateMathTan(CallRuntime* call) {
  ASSERT_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitArgumentList(call->arguments()));
  HValue* context = environment()->context();
  HCallStub* result =
      new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
  result->set_transcendental_type(TranscendentalCache::TAN);
  Drop(1);
  return ast_context()->ReturnInstruction(result, call->id());
}


// %_MathLog(x): call the transcendental-cache stub with the LOG entry.
void HOptimizedGraphBuilder::GenerateMathLog(CallRuntime* call) {
  ASSERT_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitArgumentList(call->arguments()));
  HValue* context = environment()->context();
  HCallStub* result =
      new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
  result->set_transcendental_type(TranscendentalCache::LOG);
  Drop(1);
  return ast_context()->ReturnInstruction(result, call->id());
}


// %_MathSqrt(x): emit a unary math operation (no stub call needed).
void HOptimizedGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HValue* context = environment()->context();
  HInstruction* result =
      HUnaryMathOperation::New(zone(), context, value, kMathSqrt);
  return ast_context()->ReturnInstruction(result, call->id());
}


// Check whether two RegExps are equivalent
// Not implemented in optimized code: fall back to full codegen.
void HOptimizedGraphBuilder::GenerateIsRegExpEquivalent(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionIsRegExpEquivalent);
}


// %_GetCachedArrayIndex(x): read the array index cached in a string's
// hash field.
void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HGetCachedArrayIndex* result = new(zone()) HGetCachedArrayIndex(value);
  return ast_context()->ReturnInstruction(result, call->id());
}


// Not implemented in optimized code: fall back to full codegen.
void HOptimizedGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionFastAsciiArrayJoin);
}


// Support for generators.
// Generator resumption cannot be expressed in Hydrogen; bail out so the
// function stays on the full code generator.
void HOptimizedGraphBuilder::GenerateGeneratorNext(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionGeneratorNext);
}


void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionGeneratorThrow);
}


// Emits an unconditional debug break, then returns the smi 0 as the
// (ignored) result value of the runtime call.
void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
    CallRuntime* call) {
  AddInstruction(new(zone()) HDebugBreak());
  return ast_context()->ReturnValue(graph()->GetConstant0());
}


#undef CHECK_BAILOUT
#undef CHECK_ALIVE


// Environment for an ordinary JS function frame: parameters (plus the
// receiver, hence the +1), one "special" slot (the context) and the
// function's stack-allocated locals.
HEnvironment::HEnvironment(HEnvironment* outer,
                           Scope* scope,
                           Handle<JSFunction> closure,
                           Zone* zone)
    : closure_(closure),
      values_(0, zone),
      frame_type_(JS_FUNCTION),
      parameter_count_(0),
      specials_count_(1),
      local_count_(0),
      outer_(outer),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
  Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0);
}


// Environment for a STUB frame: parameters only, no locals.
HEnvironment::HEnvironment(Zone* zone, int parameter_count)
    : values_(0, zone),
      frame_type_(STUB),
      parameter_count_(parameter_count),
      specials_count_(1),
      local_count_(0),
      outer_(NULL),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
  Initialize(parameter_count, 0, 0);
}


// Copy constructor helper; the real copying (including a deep copy of the
// outer chain) happens in Initialize(other).
HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
    : values_(0, zone),
      frame_type_(JS_FUNCTION),
      parameter_count_(0),
      specials_count_(0),
      local_count_(0),
      outer_(NULL),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(other->ast_id()),
      zone_(zone) {
  Initialize(other);
}


// Environment for an artificial frame (arguments adaptor, construct stub,
// getter/setter stub): all slots are "arguments", there are no specials or
// locals, and the caller pushes the values afterwards.
HEnvironment::HEnvironment(HEnvironment* outer,
                           Handle<JSFunction> closure,
                           FrameType frame_type,
                           int arguments,
                           Zone* zone)
    : closure_(closure),
      values_(arguments, zone),
      frame_type_(frame_type),
      parameter_count_(arguments),
      specials_count_(0),
      local_count_(0),
      outer_(outer),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
}


// Sizes the value list and fills the fixed (parameter/special/local) part
// with NULL placeholders; stack_height extra capacity is reserved on top.
void HEnvironment::Initialize(int parameter_count,
                              int local_count,
                              int stack_height) {
  parameter_count_ = parameter_count;
  local_count_ = local_count;

  // Avoid reallocating the temporaries' backing store on the first Push.
  int total = parameter_count + specials_count_ + local_count + stack_height;
  values_.Initialize(total + 4, zone());
  for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
}


// Copies all state from another environment, including pop/push history.
void HEnvironment::Initialize(const HEnvironment* other) {
  closure_ = other->closure();
  values_.AddAll(other->values_, zone());
  assigned_variables_.Union(other->assigned_variables_, zone());
  frame_type_ = other->frame_type_;
  parameter_count_ = other->parameter_count_;
  local_count_ = other->local_count_;
  if (other->outer_ != NULL) outer_ = other->outer_->Copy();  // Deep copy.
  entry_ = other->entry_;
  pop_count_ = other->pop_count_;
  push_count_ = other->push_count_;
  specials_count_ = other->specials_count_;
  ast_id_ = other->ast_id_;
}


// Merges 'other' (the environment flowing in along a new predecessor edge)
// into this environment, creating or extending phis in 'block' wherever the
// two disagree on a value.
void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
  ASSERT(!block->IsLoopHeader());
  ASSERT(values_.length() == other->values_.length());

  int length = values_.length();
  for (int i = 0; i < length; ++i) {
    HValue* value = values_[i];
    if (value != NULL && value->IsPhi() && value->block() == block) {
      // There is already a phi for the i'th value.
      HPhi* phi = HPhi::cast(value);
      // Assert index is correct and that we haven't missed an incoming edge.
      ASSERT(phi->merged_index() == i || !phi->HasMergedIndex());
      ASSERT(phi->OperandCount() == block->predecessors()->length());
      phi->AddInput(other->values_[i]);
    } else if (values_[i] != other->values_[i]) {
      // There is a fresh value on the incoming edge, a phi is needed.
      ASSERT(values_[i] != NULL && other->values_[i] != NULL);
      HPhi* phi = block->AddNewPhi(i);
      HValue* old_value = values_[i];
      // The existing value was the same along every earlier edge, so seed
      // one phi input per already-known predecessor before adding the new one.
      for (int j = 0; j < block->predecessors()->length(); j++) {
        phi->AddInput(old_value);
      }
      phi->AddInput(other->values_[i]);
      this->values_[i] = phi;
    }
  }
}


// Binds slot 'index' to 'value' and records the slot as assigned (used by
// loop header copies to know which variables need phis).
void HEnvironment::Bind(int index, HValue* value) {
  ASSERT(value != NULL);
  assigned_variables_.Add(index, zone());
  values_[index] = value;
}


// True if 'index' lies in the expression-stack region, i.e. past the
// parameters, specials and locals.
bool HEnvironment::HasExpressionAt(int index) const {
  return index >= parameter_count_ + specials_count_ + local_count_;
}


bool HEnvironment::ExpressionStackIsEmpty() const {
  ASSERT(length() >= first_expression_index());
  return length() == first_expression_index();
}


// Overwrites the expression-stack slot 'index_from_top' below the top.
void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
  int count = index_from_top + 1;
  int index = values_.length() - count;
  ASSERT(HasExpressionAt(index));
  // The push count must include at least the element in question or else
  // the new value will not be included in this environment's history.
  if (push_count_ < count) {
    // This is the same effect as popping then re-pushing 'count' elements.
    pop_count_ += (count - push_count_);
    push_count_ = count;
  }
  values_[index] = value;
}


// Pops 'count' values off the simulated expression stack.
void HEnvironment::Drop(int count) {
  for (int i = 0; i < count; ++i) {
    Pop();
  }
}


HEnvironment* HEnvironment::Copy() const {
  return new(zone()) HEnvironment(this, zone());
}


// Copy with the pop/push history cleared, e.g. for use at merge points.
HEnvironment* HEnvironment::CopyWithoutHistory() const {
  HEnvironment* result = Copy();
  result->ClearHistory();
  return result;
}


// Copy in which every slot is replaced by a fresh single-input phi owned by
// 'loop_header'; back edges add their inputs later.
HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
  HEnvironment* new_env = Copy();
  for (int i = 0; i < values_.length(); ++i) {
    HPhi* phi = loop_header->AddNewPhi(i);
    phi->AddInput(values_[i]);
    new_env->values_[i] = phi;
  }
  new_env->ClearHistory();
  return new_env;
}


// Builds an artificial stub frame environment on top of 'outer', populated
// with the receiver and 'arguments' values taken from this environment's
// expression stack (deepest first).
HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
                                                  Handle<JSFunction> target,
                                                  FrameType frame_type,
                                                  int arguments) const {
  HEnvironment* new_env =
      new(zone()) HEnvironment(outer, target, frame_type,
                               arguments + 1, zone());
  for (int i = 0; i <= arguments; ++i) {  // Include receiver.
    new_env->Push(ExpressionStackAt(arguments - i));
  }
  new_env->ClearHistory();
  return new_env;
}


// Builds the environment chain for inlining a call to 'target': an outer
// copy of this environment minus the call's arguments, optional artificial
// stub/adaptor frames, and an inner JS_FUNCTION environment seeded with the
// argument values (padded with undefined when fewer args than parameters).
HEnvironment* HEnvironment::CopyForInlining(
    Handle<JSFunction> target,
    int arguments,
    FunctionLiteral* function,
    HConstant* undefined,
    InliningKind inlining_kind,
    bool undefined_receiver) const {
  ASSERT(frame_type() == JS_FUNCTION);

  // Outer environment is a copy of this one without the arguments.
  int arity = function->scope()->num_parameters();

  HEnvironment* outer = Copy();
  outer->Drop(arguments + 1);  // Including receiver.
  outer->ClearHistory();

  if (inlining_kind == CONSTRUCT_CALL_RETURN) {
    // Create artificial constructor stub environment.  The receiver should
    // actually be the constructor function, but we pass the newly allocated
    // object instead, DoComputeConstructStubFrame() relies on that.
    outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
  } else if (inlining_kind == GETTER_CALL_RETURN) {
    // We need an additional StackFrame::INTERNAL frame for restoring the
    // correct context.
    outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
  } else if (inlining_kind == SETTER_CALL_RETURN) {
    // We need an additional StackFrame::INTERNAL frame for temporarily saving
    // the argument of the setter, see StoreStubCompiler::CompileStoreViaSetter.
    outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
  }

  if (arity != arguments) {
    // Create artificial arguments adaptation environment.
    outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
  }

  HEnvironment* inner =
      new(zone()) HEnvironment(outer, function->scope(), target, zone());
  // Get the argument values from the original environment.
  for (int i = 0; i <= arity; ++i) {  // Include receiver.
    HValue* push = (i <= arguments) ?
        ExpressionStackAt(arguments - i) : undefined;
    inner->SetValueAt(i, push);
  }
  // If the function we are inlining is a strict mode function or a
  // builtin function, pass undefined as the receiver for function
  // calls (instead of the global receiver).
  if (undefined_receiver) {
    inner->SetValueAt(0, undefined);
  }
  // The slot after the parameters holds the context; remaining locals start
  // out undefined.
  inner->SetValueAt(arity + 1, context());
  for (int i = arity + 2; i < inner->length(); ++i) {
    inner->SetValueAt(i, undefined);
  }

  inner->set_ast_id(BailoutId::FunctionEntry());
  return inner;
}


// Debug printing: dumps each slot, preceded by a section header at the
// start of the parameter/special/local/expression regions.
void HEnvironment::PrintTo(StringStream* stream) {
  for (int i = 0; i < length(); i++) {
    if (i == 0) stream->Add("parameters\n");
    if (i == parameter_count()) stream->Add("specials\n");
    if (i == parameter_count() + specials_count()) stream->Add("locals\n");
    if (i == parameter_count() + specials_count() + local_count()) {
      stream->Add("expressions\n");
    }
    HValue* val = values_.at(i);
    stream->Add("%d: ", i);
    if (val != NULL) {
      val->PrintNameTo(stream);
    } else {
      stream->Add("NULL");
    }
    stream->Add("\n");
  }
  PrintF("\n");
}


void HEnvironment::PrintToStd() {
  HeapStringAllocator string_allocator;
  StringStream trace(&string_allocator);
  PrintTo(&trace);
  PrintF("%s", *trace.ToCString());
}


// Emits the "compilation" header of the c1visualizer-style trace: function
// name for optimized compiles, stub name otherwise.
void HTracer::TraceCompilation(CompilationInfo* info) {
  Tag tag(this, "compilation");
  if (info->IsOptimizing()) {
    Handle<String> name = info->function()->debug_name();
    PrintStringProperty("name", *name->ToCString());
    PrintStringProperty("method", *name->ToCString());
  } else {
    CodeStub::Major major_key = info->code_stub()->MajorKey();
    PrintStringProperty("name", CodeStub::MajorName(major_key, false));
    PrintStringProperty("method", "stub");
  }
  PrintLongProperty("date", static_cast<int64_t>(OS::TimeCurrentMillis()));
}


// Traces the graph together with its Lithium chunk.  Handle dereferences
// are explicitly allowed because tracing runs on the main thread only.
void HTracer::TraceLithium(const char* name, LChunk* chunk) {
  ASSERT(!FLAG_parallel_recompilation);
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  Trace(name, chunk->graph(), chunk);
}
// Traces the Hydrogen graph without Lithium information.
void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
  ASSERT(!FLAG_parallel_recompilation);
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  Trace(name, graph, NULL);
}


// Writes one "cfg" section in c1visualizer format: per basic block its
// predecessors/successors, dominator, loop depth, phis, HIR instructions
// and (if 'chunk' is non-NULL) the corresponding LIR instructions.
void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
  Tag tag(this, "cfg");
  PrintStringProperty("name", name);
  const ZoneList<HBasicBlock*>* blocks = graph->blocks();
  for (int i = 0; i < blocks->length(); i++) {
    HBasicBlock* current = blocks->at(i);
    Tag block_tag(this, "block");
    PrintBlockProperty("name", current->block_id());
    // Bytecode indices are not meaningful for Hydrogen; emit -1 to satisfy
    // the c1visualizer format.
    PrintIntProperty("from_bci", -1);
    PrintIntProperty("to_bci", -1);

    if (!current->predecessors()->is_empty()) {
      PrintIndent();
      trace_.Add("predecessors");
      for (int j = 0; j < current->predecessors()->length(); ++j) {
        trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
      }
      trace_.Add("\n");
    } else {
      PrintEmptyProperty("predecessors");
    }

    if (current->end()->SuccessorCount() == 0) {
      PrintEmptyProperty("successors");
    } else {
      PrintIndent();
      trace_.Add("successors");
      for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
        trace_.Add(" \"B%d\"", it.Current()->block_id());
      }
      trace_.Add("\n");
    }

    PrintEmptyProperty("xhandlers");
    const char* flags = current->IsLoopSuccessorDominator()
        ? "dom-loop-succ"
        : "";
    PrintStringProperty("flags", flags);

    if (current->dominator() != NULL) {
      PrintBlockProperty("dominator", current->dominator()->block_id());
    }

    PrintIntProperty("loop_depth", current->LoopNestingDepth());

    if (chunk != NULL) {
      // Map the block's instruction index range to LIR lifetime positions.
      int first_index = current->first_instruction_index();
      int last_index = current->last_instruction_index();
      PrintIntProperty(
          "first_lir_id",
          LifetimePosition::FromInstructionIndex(first_index).Value());
      PrintIntProperty(
          "last_lir_id",
          LifetimePosition::FromInstructionIndex(last_index).Value());
    }

    {
      // Phis are reported in the "locals" state section.
      Tag states_tag(this, "states");
      Tag locals_tag(this, "locals");
      int total = current->phis()->length();
      PrintIntProperty("size", current->phis()->length());
      PrintStringProperty("method", "None");
      for (int j = 0; j < total; ++j) {
        HPhi* phi = current->phis()->at(j);
        PrintIndent();
        trace_.Add("%d ", phi->merged_index());
        phi->PrintNameTo(&trace_);
        trace_.Add(" ");
        phi->PrintTo(&trace_);
        trace_.Add("\n");
      }
    }

    {
      Tag HIR_tag(this, "HIR");
      for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
        HInstruction* instruction = it.Current();
        int bci = 0;  // No bytecode index available; always 0.
        int uses = instruction->UseCount();
        PrintIndent();
        trace_.Add("%d %d ", bci, uses);
        instruction->PrintNameTo(&trace_);
        trace_.Add(" ");
        instruction->PrintTo(&trace_);
        trace_.Add(" <|@\n");
      }
    }


    if (chunk != NULL) {
      Tag LIR_tag(this, "LIR");
      int first_index = current->first_instruction_index();
      int last_index = current->last_instruction_index();
      if (first_index != -1 && last_index != -1) {
        const ZoneList<LInstruction*>* instructions = chunk->instructions();
        for (int i = first_index; i <= last_index; ++i) {
          LInstruction* linstr = instructions->at(i);
          if (linstr != NULL) {
            PrintIndent();
            trace_.Add("%d ",
                       LifetimePosition::FromInstructionIndex(i).Value());
            linstr->PrintTo(&trace_);
            trace_.Add(" <|@\n");
          }
        }
      }
    }
  }
}


// Writes the register allocator's live ranges: fixed double registers,
// fixed general registers, then all virtual ("object") ranges.
void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
  Tag tag(this, "intervals");
  PrintStringProperty("name", name);

  const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
  for (int i = 0; i < fixed_d->length(); ++i) {
    TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
  }

  const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
  for (int i = 0; i < fixed->length(); ++i) {
    TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
  }

  const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
  for (int i = 0; i < live_ranges->length(); ++i) {
    TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
  }
}


// Writes one live range line: id, type, assigned register or spill slot,
// parent/hint indices, use intervals and beneficial use positions.
void HTracer::TraceLiveRange(LiveRange* range, const char* type,
                             Zone* zone) {
  if (range != NULL && !range->IsEmpty()) {
    PrintIndent();
    trace_.Add("%d %s", range->id(), type);
    if (range->HasRegisterAssigned()) {
      LOperand* op = range->CreateAssignedOperand(zone);
      int assigned_reg = op->index();
      if (op->IsDoubleRegister()) {
        trace_.Add(" \"%s\"",
                   DoubleRegister::AllocationIndexToString(assigned_reg));
      } else {
        ASSERT(op->IsRegister());
        trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg));
      }
    } else if (range->IsSpilled()) {
      LOperand* op = range->TopLevel()->GetSpillOperand();
      if (op->IsDoubleStackSlot()) {
        trace_.Add(" \"double_stack:%d\"", op->index());
      } else {
        ASSERT(op->IsStackSlot());
        trace_.Add(" \"stack:%d\"", op->index());
      }
    }
    // A child range reports its parent's id; a top-level range its own.
    int parent_index = -1;
    if (range->IsChild()) {
      parent_index = range->parent()->id();
    } else {
      parent_index = range->id();
    }
    LOperand* op = range->FirstHint();
    int hint_index = -1;
    if (op != NULL && op->IsUnallocated()) {
      hint_index = LUnallocated::cast(op)->virtual_register();
    }
    trace_.Add(" %d %d", parent_index, hint_index);
    UseInterval* cur_interval = range->first_interval();
    while (cur_interval != NULL && range->Covers(cur_interval->start())) {
      trace_.Add(" [%d, %d[",
                 cur_interval->start().Value(),
                 cur_interval->end().Value());
      cur_interval = cur_interval->next();
    }

    UsePosition* current_pos = range->first_pos();
    while (current_pos != NULL) {
      if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
        trace_.Add(" %d M", current_pos->pos().Value());
      }
      current_pos = current_pos->next();
    }

    trace_.Add(" \"\"\n");
  }
}


// Appends the accumulated trace to the output file and resets the buffer.
void HTracer::FlushToFile() {
  AppendChars(filename_.start(), *trace_.ToCString(), trace_.length(), false);
  trace_.Reset();
}


// Accumulates the source size of each compiled function for the
// per-kB-of-source statistics in Print().
void HStatistics::Initialize(CompilationInfo* info) {
  if (info->shared_info().is_null()) return;
  source_size_ += info->shared_info()->SourceSize();
}


// Prints the per-phase timing table followed by aggregate totals.
// Tick values are in microseconds, hence the repeated /1000 to get ms.
void HStatistics::Print() {
  PrintF("Timing results:\n");
  int64_t sum = 0;
  for (int i = 0; i < timing_.length(); ++i) {
    sum += timing_[i];
  }

  for (int i = 0; i < names_.length(); ++i) {
    PrintF("%32s", names_[i]);
    double ms = static_cast<double>(timing_[i]) / 1000;
    double percent = static_cast<double>(timing_[i]) * 100 / sum;
    PrintF(" %8.3f ms / %4.1f %% ", ms, percent);

    unsigned size = sizes_[i];
    double size_percent = static_cast<double>(size) * 100 / total_size_;
    PrintF(" %9u bytes / %4.1f %%\n", size, size_percent);
  }

  PrintF("----------------------------------------"
         "---------------------------------------\n");
  int64_t total = create_graph_ + optimize_graph_ + generate_code_;
  PrintF("%32s %8.3f ms / %4.1f %% \n",
         "Create graph",
         static_cast<double>(create_graph_) / 1000,
         static_cast<double>(create_graph_) * 100 / total);
  PrintF("%32s %8.3f ms / %4.1f %% \n",
         "Optimize graph",
         static_cast<double>(optimize_graph_) / 1000,
         static_cast<double>(optimize_graph_) * 100 / total);
  PrintF("%32s %8.3f ms / %4.1f %% \n",
         "Generate and install code",
         static_cast<double>(generate_code_) / 1000,
         static_cast<double>(generate_code_) * 100 / total);
  PrintF("----------------------------------------"
         "---------------------------------------\n");
  PrintF("%32s %8.3f ms (%.1f times slower than full code gen)\n",
         "Total",
         static_cast<double>(total) / 1000,
         static_cast<double>(total) / full_code_gen_);

  double source_size_in_kb = static_cast<double>(source_size_) / 1024;
  double normalized_time = source_size_in_kb > 0
      ? (static_cast<double>(total) / 1000) / source_size_in_kb
      : 0;
  double normalized_size_in_kb = source_size_in_kb > 0
      ? total_size_ / 1024 / source_size_in_kb
      : 0;
  PrintF("%32s %8.3f ms %7.3f kB allocated\n",
         "Average per kB source",
         normalized_time, normalized_size_in_kb);
}


// Adds 'ticks'/'size' to the entry for 'name', creating it on first use.
// Linear lookup is fine: the number of phases is small and fixed.
void HStatistics::SaveTiming(const char* name, int64_t ticks, unsigned size) {
  total_size_ += size;
  for (int i = 0; i < names_.length(); ++i) {
    if (strcmp(names_[i], name) == 0) {
      timing_[i] += ticks;
      sizes_[i] += size;
      return;
    }
  }
  names_.Add(name);
  timing_.Add(ticks);
  sizes_.Add(size);
}


// On phase exit, optionally emits a Hydrogen trace and, in debug builds,
// re-verifies the graph invariants.
HPhase::~HPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
  }

#ifdef DEBUG
  graph_->Verify(false);  // No full verify.
#endif
}

} }  // namespace v8::internal