1 // Copyright 2013 the V8 project authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style license that can be 3 // found in the LICENSE file. 4 5 #include "src/hydrogen.h" 6 7 #include <algorithm> 8 9 #include "src/v8.h" 10 #include "src/allocation-site-scopes.h" 11 #include "src/codegen.h" 12 #include "src/full-codegen.h" 13 #include "src/hashmap.h" 14 #include "src/hydrogen-bce.h" 15 #include "src/hydrogen-bch.h" 16 #include "src/hydrogen-canonicalize.h" 17 #include "src/hydrogen-check-elimination.h" 18 #include "src/hydrogen-dce.h" 19 #include "src/hydrogen-dehoist.h" 20 #include "src/hydrogen-environment-liveness.h" 21 #include "src/hydrogen-escape-analysis.h" 22 #include "src/hydrogen-infer-representation.h" 23 #include "src/hydrogen-infer-types.h" 24 #include "src/hydrogen-load-elimination.h" 25 #include "src/hydrogen-gvn.h" 26 #include "src/hydrogen-mark-deoptimize.h" 27 #include "src/hydrogen-mark-unreachable.h" 28 #include "src/hydrogen-osr.h" 29 #include "src/hydrogen-range-analysis.h" 30 #include "src/hydrogen-redundant-phi.h" 31 #include "src/hydrogen-removable-simulates.h" 32 #include "src/hydrogen-representation-changes.h" 33 #include "src/hydrogen-sce.h" 34 #include "src/hydrogen-store-elimination.h" 35 #include "src/hydrogen-uint32-analysis.h" 36 #include "src/lithium-allocator.h" 37 #include "src/parser.h" 38 #include "src/runtime.h" 39 #include "src/scopeinfo.h" 40 #include "src/scopes.h" 41 #include "src/stub-cache.h" 42 #include "src/typing.h" 43 44 #if V8_TARGET_ARCH_IA32 45 #include "src/ia32/lithium-codegen-ia32.h" 46 #elif V8_TARGET_ARCH_X64 47 #include "src/x64/lithium-codegen-x64.h" 48 #elif V8_TARGET_ARCH_ARM64 49 #include "src/arm64/lithium-codegen-arm64.h" 50 #elif V8_TARGET_ARCH_ARM 51 #include "src/arm/lithium-codegen-arm.h" 52 #elif V8_TARGET_ARCH_MIPS 53 #include "src/mips/lithium-codegen-mips.h" 54 #elif V8_TARGET_ARCH_X87 55 #include "src/x87/lithium-codegen-x87.h" 56 #else 57 #error Unsupported 
target architecture.
#endif

namespace v8 {
namespace internal {

HBasicBlock::HBasicBlock(HGraph* graph)
    : block_id_(graph->GetNextBlockID()),
      graph_(graph),
      phis_(4, graph->zone()),
      first_(NULL),
      last_(NULL),
      end_(NULL),
      loop_information_(NULL),
      predecessors_(2, graph->zone()),
      dominator_(NULL),
      dominated_blocks_(4, graph->zone()),
      last_environment_(NULL),
      argument_count_(-1),
      first_instruction_index_(-1),
      last_instruction_index_(-1),
      deleted_phis_(4, graph->zone()),
      parent_loop_header_(NULL),
      inlined_entry_block_(NULL),
      is_inline_return_target_(false),
      is_reachable_(true),
      dominates_loop_successors_(false),
      is_osr_entry_(false),
      is_ordered_(false) { }


Isolate* HBasicBlock::isolate() const {
  return graph_->isolate();
}


void HBasicBlock::MarkUnreachable() {
  is_reachable_ = false;
}


// Turns this block into a loop header by allocating loop information for it.
// Must not already be a loop header.
void HBasicBlock::AttachLoopInformation() {
  ASSERT(!IsLoopHeader());
  loop_information_ = new(zone()) HLoopInformation(this, zone());
}


// Drops the loop information again (used e.g. for degenerated loops with a
// single predecessor, see PostProcessLoopHeader). The HLoopInformation itself
// is zone-allocated and needs no explicit deallocation.
void HBasicBlock::DetachLoopInformation() {
  ASSERT(IsLoopHeader());
  loop_information_ = NULL;
}


// Appends a phi to this block and records the block as the phi's owner.
// The start block never holds phis.
void HBasicBlock::AddPhi(HPhi* phi) {
  ASSERT(!IsStartBlock());
  phis_.Add(phi, zone());
  phi->SetBlock(this);
}


// Kills and unlinks a phi that currently belongs to this block.
void HBasicBlock::RemovePhi(HPhi* phi) {
  ASSERT(phi->block() == this);
  ASSERT(phis_.Contains(phi));
  phi->Kill();
  phis_.RemoveElement(phi);
  phi->SetBlock(NULL);
}


// Appends |instr| to the end of this block. On the first instruction added,
// an HBlockEntry marker is lazily created and installed as the block's first
// instruction; |position| (when known) is recorded on both the entry marker
// and the instruction itself.
void HBasicBlock::AddInstruction(HInstruction* instr,
                                 HSourcePosition position) {
  ASSERT(!IsStartBlock() || !IsFinished());
  ASSERT(!instr->IsLinked());
  ASSERT(!IsFinished());

  if (!position.IsUnknown()) {
    instr->set_position(position);
  }
  if (first_ == NULL) {
    ASSERT(last_environment() != NULL);
    ASSERT(!last_environment()->ast_id().IsNone());
    HBlockEntry* entry = new(zone()) HBlockEntry();
    entry->InitializeAsFirst(this);
    if (!position.IsUnknown()) {
      entry->set_position(position);
    } else {
      ASSERT(!FLAG_hydrogen_track_positions ||
             !graph()->info()->IsOptimizing());
    }
    first_ = last_ = entry;
  }
  instr->InsertAfter(last_);
}


// Creates and attaches a new phi. Inside a no-side-effects scope the merged
// index is invalidated so the phi is not tied to an environment slot.
HPhi* HBasicBlock::AddNewPhi(int merged_index) {
  if (graph()->IsInsideNoSideEffectsScope()) {
    merged_index = HPhi::kInvalidMergedIndex;
  }
  HPhi* phi = new(zone()) HPhi(merged_index, zone());
  AddPhi(phi);
  return phi;
}


// Builds an HSimulate capturing the current environment's pending pushes,
// pops and variable assignments, then clears that history from the
// environment. The simulate records the deopt state for |ast_id|.
HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
                                       RemovableSimulate removable) {
  ASSERT(HasEnvironment());
  HEnvironment* environment = last_environment();
  ASSERT(ast_id.IsNone() ||
         ast_id == BailoutId::StubEntry() ||
         environment->closure()->shared()->VerifyBailoutId(ast_id));

  int push_count = environment->push_count();
  int pop_count = environment->pop_count();

  HSimulate* instr =
      new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
#ifdef DEBUG
  instr->set_closure(environment->closure());
#endif
  // Order of pushed values: newest (top of stack) first. This allows
  // HSimulate::MergeWith() to easily append additional pushed values
  // that are older (from further down the stack).
  for (int i = 0; i < push_count; ++i) {
    instr->AddPushedValue(environment->ExpressionStackAt(i));
  }
  for (GrowableBitVector::Iterator it(environment->assigned_variables(),
                                      zone());
       !it.Done();
       it.Advance()) {
    int index = it.Current();
    instr->AddAssignedValue(index, environment->Lookup(index));
  }
  environment->ClearHistory();
  return instr;
}


// Terminates this block with the control instruction |end| and registers
// this block as a predecessor of each of |end|'s successors.
void HBasicBlock::Finish(HControlInstruction* end, HSourcePosition position) {
  ASSERT(!IsFinished());
  AddInstruction(end, position);
  end_ = end;
  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    it.Current()->RegisterPredecessor(this);
  }
}


// Finishes this block with an unconditional jump to |block|. If the target
// is an inline return target, an HLeaveInlined is emitted first and the
// inlined portion of the environment is discarded.
void HBasicBlock::Goto(HBasicBlock* block,
                       HSourcePosition position,
                       FunctionState* state,
                       bool add_simulate) {
  bool drop_extra = state != NULL &&
      state->inlining_kind() == NORMAL_RETURN;

  if (block->IsInlineReturnTarget()) {
    HEnvironment* env = last_environment();
    int argument_count = env->arguments_environment()->parameter_count();
    AddInstruction(new(zone())
                   HLeaveInlined(state->entry(), argument_count),
                   position);
    UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  }

  if (add_simulate) AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(block);
  Finish(instr, position);
}


// Like Goto() for the inline-return case: leaves the inlined frame, pushes
// the inlined function's return value, and jumps to the function's return
// target block.
void HBasicBlock::AddLeaveInlined(HValue* return_value,
                                  FunctionState* state,
                                  HSourcePosition position) {
  HBasicBlock* target = state->function_return();
  bool drop_extra = state->inlining_kind() == NORMAL_RETURN;

  ASSERT(target->IsInlineReturnTarget());
  ASSERT(return_value != NULL);
  HEnvironment* env = last_environment();
  int argument_count = env->arguments_environment()->parameter_count();
  AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
                 position);
  UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  last_environment()->Push(return_value);
  AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(target);
  Finish(instr, position);
}


void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
  ASSERT(!HasEnvironment());
  ASSERT(first() == NULL);
  UpdateEnvironment(env);
}


// Installs |env| as the current environment and lets the graph track the
// largest environment seen (used for stack-size accounting).
void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
  last_environment_ = env;
  graph()->update_maximum_environment_size(env->first_expression_index());
}


// Stamps |ast_id| onto the simulates and environments of all predecessors of
// this join block, so all incoming edges agree on the deopt id.
void HBasicBlock::SetJoinId(BailoutId ast_id) {
  int length = predecessors_.length();
  ASSERT(length > 0);
  for (int i = 0; i < length; i++) {
    HBasicBlock* predecessor = predecessors_[i];
    ASSERT(predecessor->end()->IsGoto());
    // Each predecessor ends with a simulate directly followed by the goto.
    HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
    ASSERT(i != 0 ||
           (predecessor->last_environment()->closure().is_null() ||
            predecessor->last_environment()->closure()->shared()
                ->VerifyBailoutId(ast_id)));
    simulate->set_ast_id(ast_id);
    predecessor->last_environment()->set_ast_id(ast_id);
  }
}


// Returns true iff this block strictly dominates |other| (walks the
// dominator chain upwards from |other|).
bool HBasicBlock::Dominates(HBasicBlock* other) const {
  HBasicBlock* current = other->dominator();
  while (current != NULL) {
    if (current == this) return true;
    current = current->dominator();
  }
  return false;
}


bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
  if (this == other) return true;
  return Dominates(other);
}


// Number of loops this block is nested in; a loop header counts its own loop.
int HBasicBlock::LoopNestingDepth() const {
  const HBasicBlock* current = this;
  int result = (current->IsLoopHeader()) ? 1 : 0;
  while (current->parent_loop_header() != NULL) {
    current = current->parent_loop_header();
    result++;
  }
  return result;
}


// Called once all edges into a loop header are known: assigns the entry's
// ast id and registers all predecessors except the first as back edges.
void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
  ASSERT(IsLoopHeader());

  SetJoinId(stmt->EntryId());
  if (predecessors()->length() == 1) {
    // This is a degenerated loop.
    DetachLoopInformation();
    return;
  }

  // Only the first entry into the loop is from outside the loop. All other
  // entries must be back edges.
  for (int i = 1; i < predecessors()->length(); ++i) {
    loop_information()->RegisterBackEdge(predecessors()->at(i));
  }
}


// Marks the |succ|'th successor edge's target as unreachable. Only valid
// when that successor has no other predecessors.
void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
  ASSERT(IsFinished());
  HBasicBlock* succ_block = end()->SuccessorAt(succ);

  ASSERT(succ_block->predecessors()->length() == 1);
  succ_block->MarkUnreachable();
}


// Records |pred| as a predecessor. For a first predecessor the environment is
// copied in; for later edges either loop phis receive a new input (loop
// headers) or the environments are merged (ordinary joins).
void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
  if (HasPredecessor()) {
    // Only loop header blocks can have a predecessor added after
    // instructions have been added to the block (they have phis for all
    // values in the environment, these phis may be eliminated later).
    ASSERT(IsLoopHeader() || first_ == NULL);
    HEnvironment* incoming_env = pred->last_environment();
    if (IsLoopHeader()) {
      ASSERT(phis()->length() == incoming_env->length());
      for (int i = 0; i < phis_.length(); ++i) {
        phis_[i]->AddInput(incoming_env->values()->at(i));
      }
    } else {
      last_environment()->AddIncomingEdge(this, pred->last_environment());
    }
  } else if (!HasEnvironment() && !IsFinished()) {
    ASSERT(!IsLoopHeader());
    SetInitialEnvironment(pred->last_environment()->Copy());
  }

  predecessors_.Add(pred, zone());
}


void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
  ASSERT(!dominated_blocks_.Contains(block));
  // Keep the list of dominated blocks sorted such that if there is two
  // succeeding block in this list, the predecessor is before the successor.
  int index = 0;
  while (index < dominated_blocks_.length() &&
         dominated_blocks_[index]->block_id() < block->block_id()) {
    ++index;
  }
  dominated_blocks_.InsertAt(index, block, zone());
}


// Updates this block's dominator to the common dominator of the current one
// and |other|, relying on block ids being a reverse-post-order numbering
// (higher id => walk that side's dominator chain upwards).
void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
  if (dominator_ == NULL) {
    dominator_ = other;
    other->AddDominatedBlock(this);
  } else if (other->dominator() != NULL) {
    HBasicBlock* first = dominator_;
    HBasicBlock* second = other;

    while (first != second) {
      if (first->block_id() > second->block_id()) {
        first = first->dominator();
      } else {
        second = second->dominator();
      }
      ASSERT(first != NULL && second != NULL);
    }

    if (dominator_ != first) {
      ASSERT(dominator_->dominated_blocks_.Contains(this));
      dominator_->dominated_blocks_.RemoveElement(this);
      dominator_ = first;
      first->AddDominatedBlock(this);
    }
  }
}


void HBasicBlock::AssignLoopSuccessorDominators() {
  // Mark blocks that dominate all subsequent reachable blocks inside their
  // loop. Exploit the fact that blocks are sorted in reverse post order. When
  // the loop is visited in increasing block id order, if the number of
  // non-loop-exiting successor edges at the dominator_candidate block doesn't
  // exceed the number of previously encountered predecessor edges, there is no
  // path from the loop header to any block with higher id that doesn't go
  // through the dominator_candidate block. In this case, the
  // dominator_candidate block is guaranteed to dominate all blocks reachable
  // from it with higher ids.
  HBasicBlock* last = loop_information()->GetLastBackEdge();
  int outstanding_successors = 1;  // one edge from the pre-header
  // Header always dominates everything.
  MarkAsLoopSuccessorDominator();
  for (int j = block_id(); j <= last->block_id(); ++j) {
    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
    for (HPredecessorIterator it(dominator_candidate); !it.Done();
         it.Advance()) {
      HBasicBlock* predecessor = it.Current();
      // Don't count back edges.
      if (predecessor->block_id() < dominator_candidate->block_id()) {
        outstanding_successors--;
      }
    }

    // If more successors than predecessors have been seen in the loop up to
    // now, it's not possible to guarantee that the current block dominates
    // all of the blocks with higher IDs. In this case, assume conservatively
    // that those paths through loop that don't go through the current block
    // contain all of the loop's dependencies. Also be careful to record
    // dominator information about the current loop that's being processed,
    // and not nested loops, which will be processed when
    // AssignLoopSuccessorDominators gets called on their header.
    ASSERT(outstanding_successors >= 0);
    HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
    if (outstanding_successors == 0 &&
        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
      dominator_candidate->MarkAsLoopSuccessorDominator();
    }
    HControlInstruction* end = dominator_candidate->end();
    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
      HBasicBlock* successor = it.Current();
      // Only count successors that remain inside the loop and don't loop back
      // to a loop header.
      if (successor->block_id() > dominator_candidate->block_id() &&
          successor->block_id() <= last->block_id()) {
        // Backwards edges must land on loop headers.
        ASSERT(successor->block_id() > dominator_candidate->block_id() ||
               successor->IsLoopHeader());
        outstanding_successors++;
      }
    }
  }
}


// Linear search for |predecessor|'s index; the caller guarantees membership
// (falls through to UNREACHABLE otherwise).
int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
  for (int i = 0; i < predecessors_.length(); ++i) {
    if (predecessors_[i] == predecessor) return i;
  }
  UNREACHABLE();
  return -1;
}


#ifdef DEBUG
void HBasicBlock::Verify() {
  // Check that every block is finished.
  ASSERT(IsFinished());
  ASSERT(block_id() >= 0);

  // Check that the incoming edges are in edge split form.
  if (predecessors_.length() > 1) {
    for (int i = 0; i < predecessors_.length(); ++i) {
      ASSERT(predecessors_[i]->end()->SecondSuccessor() == NULL);
    }
  }
}
#endif


void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
  this->back_edges_.Add(block, block->zone());
  AddBlock(block);
}


// Returns the back edge with the highest block id, i.e. the last loop block
// in reverse post order (NULL if there are no back edges).
HBasicBlock* HLoopInformation::GetLastBackEdge() const {
  int max_id = -1;
  HBasicBlock* result = NULL;
  for (int i = 0; i < back_edges_.length(); ++i) {
    HBasicBlock* cur = back_edges_[i];
    if (cur->block_id() > max_id) {
      max_id = cur->block_id();
      result = cur;
    }
  }
  return result;
}


// Recursively adds |block| (and everything that can reach it inside the
// loop) to this loop's block set, setting parent-loop links. Blocks already
// claimed by a nested loop are represented by that loop's header.
void HLoopInformation::AddBlock(HBasicBlock* block) {
  if (block == loop_header()) return;
  if (block->parent_loop_header() == loop_header()) return;
  if (block->parent_loop_header() != NULL) {
    AddBlock(block->parent_loop_header());
  } else {
    block->set_parent_loop_header(loop_header());
    blocks_.Add(block, block->zone());
    for (int i = 0; i < block->predecessors()->length(); ++i) {
      AddBlock(block->predecessors()->at(i));
    }
  }
}


#ifdef DEBUG

// Checks reachability of the blocks in this graph and stores a bit in
// the BitVector "reachable()" for every block that can be reached
// from the start block of the graph. If "dont_visit" is non-null, the given
// block is treated as if it would not be part of the graph. "visited_count()"
// returns the number of reachable blocks.
519 class ReachabilityAnalyzer BASE_EMBEDDED { 520 public: 521 ReachabilityAnalyzer(HBasicBlock* entry_block, 522 int block_count, 523 HBasicBlock* dont_visit) 524 : visited_count_(0), 525 stack_(16, entry_block->zone()), 526 reachable_(block_count, entry_block->zone()), 527 dont_visit_(dont_visit) { 528 PushBlock(entry_block); 529 Analyze(); 530 } 531 532 int visited_count() const { return visited_count_; } 533 const BitVector* reachable() const { return &reachable_; } 534 535 private: 536 void PushBlock(HBasicBlock* block) { 537 if (block != NULL && block != dont_visit_ && 538 !reachable_.Contains(block->block_id())) { 539 reachable_.Add(block->block_id()); 540 stack_.Add(block, block->zone()); 541 visited_count_++; 542 } 543 } 544 545 void Analyze() { 546 while (!stack_.is_empty()) { 547 HControlInstruction* end = stack_.RemoveLast()->end(); 548 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) { 549 PushBlock(it.Current()); 550 } 551 } 552 } 553 554 int visited_count_; 555 ZoneList<HBasicBlock*> stack_; 556 BitVector reachable_; 557 HBasicBlock* dont_visit_; 558 }; 559 560 561 void HGraph::Verify(bool do_full_verify) const { 562 Heap::RelocationLock relocation_lock(isolate()->heap()); 563 AllowHandleDereference allow_deref; 564 AllowDeferredHandleDereference allow_deferred_deref; 565 for (int i = 0; i < blocks_.length(); i++) { 566 HBasicBlock* block = blocks_.at(i); 567 568 block->Verify(); 569 570 // Check that every block contains at least one node and that only the last 571 // node is a control instruction. 572 HInstruction* current = block->first(); 573 ASSERT(current != NULL && current->IsBlockEntry()); 574 while (current != NULL) { 575 ASSERT((current->next() == NULL) == current->IsControlInstruction()); 576 ASSERT(current->block() == block); 577 current->Verify(); 578 current = current->next(); 579 } 580 581 // Check that successors are correctly set. 
582 HBasicBlock* first = block->end()->FirstSuccessor(); 583 HBasicBlock* second = block->end()->SecondSuccessor(); 584 ASSERT(second == NULL || first != NULL); 585 586 // Check that the predecessor array is correct. 587 if (first != NULL) { 588 ASSERT(first->predecessors()->Contains(block)); 589 if (second != NULL) { 590 ASSERT(second->predecessors()->Contains(block)); 591 } 592 } 593 594 // Check that phis have correct arguments. 595 for (int j = 0; j < block->phis()->length(); j++) { 596 HPhi* phi = block->phis()->at(j); 597 phi->Verify(); 598 } 599 600 // Check that all join blocks have predecessors that end with an 601 // unconditional goto and agree on their environment node id. 602 if (block->predecessors()->length() >= 2) { 603 BailoutId id = 604 block->predecessors()->first()->last_environment()->ast_id(); 605 for (int k = 0; k < block->predecessors()->length(); k++) { 606 HBasicBlock* predecessor = block->predecessors()->at(k); 607 ASSERT(predecessor->end()->IsGoto() || 608 predecessor->end()->IsDeoptimize()); 609 ASSERT(predecessor->last_environment()->ast_id() == id); 610 } 611 } 612 } 613 614 // Check special property of first block to have no predecessors. 615 ASSERT(blocks_.at(0)->predecessors()->is_empty()); 616 617 if (do_full_verify) { 618 // Check that the graph is fully connected. 619 ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL); 620 ASSERT(analyzer.visited_count() == blocks_.length()); 621 622 // Check that entry block dominator is NULL. 623 ASSERT(entry_block_->dominator() == NULL); 624 625 // Check dominators. 626 for (int i = 0; i < blocks_.length(); ++i) { 627 HBasicBlock* block = blocks_.at(i); 628 if (block->dominator() == NULL) { 629 // Only start block may have no dominator assigned to. 630 ASSERT(i == 0); 631 } else { 632 // Assert that block is unreachable if dominator must not be visited. 
633 ReachabilityAnalyzer dominator_analyzer(entry_block_, 634 blocks_.length(), 635 block->dominator()); 636 ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id())); 637 } 638 } 639 } 640 } 641 642 #endif 643 644 645 HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer, 646 int32_t value) { 647 if (!pointer->is_set()) { 648 // Can't pass GetInvalidContext() to HConstant::New, because that will 649 // recursively call GetConstant 650 HConstant* constant = HConstant::New(zone(), NULL, value); 651 constant->InsertAfter(entry_block()->first()); 652 pointer->set(constant); 653 return constant; 654 } 655 return ReinsertConstantIfNecessary(pointer->get()); 656 } 657 658 659 HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) { 660 if (!constant->IsLinked()) { 661 // The constant was removed from the graph. Reinsert. 662 constant->ClearFlag(HValue::kIsDead); 663 constant->InsertAfter(entry_block()->first()); 664 } 665 return constant; 666 } 667 668 669 HConstant* HGraph::GetConstant0() { 670 return GetConstant(&constant_0_, 0); 671 } 672 673 674 HConstant* HGraph::GetConstant1() { 675 return GetConstant(&constant_1_, 1); 676 } 677 678 679 HConstant* HGraph::GetConstantMinus1() { 680 return GetConstant(&constant_minus1_, -1); 681 } 682 683 684 #define DEFINE_GET_CONSTANT(Name, name, type, htype, boolean_value) \ 685 HConstant* HGraph::GetConstant##Name() { \ 686 if (!constant_##name##_.is_set()) { \ 687 HConstant* constant = new(zone()) HConstant( \ 688 Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \ 689 Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()), \ 690 false, \ 691 Representation::Tagged(), \ 692 htype, \ 693 true, \ 694 boolean_value, \ 695 false, \ 696 ODDBALL_TYPE); \ 697 constant->InsertAfter(entry_block()->first()); \ 698 constant_##name##_.set(constant); \ 699 } \ 700 return ReinsertConstantIfNecessary(constant_##name##_.get()); \ 701 } 702 703 704 DEFINE_GET_CONSTANT(Undefined, 
undefined, undefined, HType::Undefined(), false) 705 DEFINE_GET_CONSTANT(True, true, boolean, HType::Boolean(), true) 706 DEFINE_GET_CONSTANT(False, false, boolean, HType::Boolean(), false) 707 DEFINE_GET_CONSTANT(Hole, the_hole, the_hole, HType::None(), false) 708 DEFINE_GET_CONSTANT(Null, null, null, HType::Null(), false) 709 710 711 #undef DEFINE_GET_CONSTANT 712 713 #define DEFINE_IS_CONSTANT(Name, name) \ 714 bool HGraph::IsConstant##Name(HConstant* constant) { \ 715 return constant_##name##_.is_set() && constant == constant_##name##_.get(); \ 716 } 717 DEFINE_IS_CONSTANT(Undefined, undefined) 718 DEFINE_IS_CONSTANT(0, 0) 719 DEFINE_IS_CONSTANT(1, 1) 720 DEFINE_IS_CONSTANT(Minus1, minus1) 721 DEFINE_IS_CONSTANT(True, true) 722 DEFINE_IS_CONSTANT(False, false) 723 DEFINE_IS_CONSTANT(Hole, the_hole) 724 DEFINE_IS_CONSTANT(Null, null) 725 726 #undef DEFINE_IS_CONSTANT 727 728 729 HConstant* HGraph::GetInvalidContext() { 730 return GetConstant(&constant_invalid_context_, 0xFFFFC0C7); 731 } 732 733 734 bool HGraph::IsStandardConstant(HConstant* constant) { 735 if (IsConstantUndefined(constant)) return true; 736 if (IsConstant0(constant)) return true; 737 if (IsConstant1(constant)) return true; 738 if (IsConstantMinus1(constant)) return true; 739 if (IsConstantTrue(constant)) return true; 740 if (IsConstantFalse(constant)) return true; 741 if (IsConstantHole(constant)) return true; 742 if (IsConstantNull(constant)) return true; 743 return false; 744 } 745 746 747 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder) 748 : builder_(builder), 749 finished_(false), 750 did_then_(false), 751 did_else_(false), 752 did_else_if_(false), 753 did_and_(false), 754 did_or_(false), 755 captured_(false), 756 needs_compare_(true), 757 pending_merge_block_(false), 758 split_edge_merge_block_(NULL), 759 merge_at_join_blocks_(NULL), 760 normal_merge_at_join_block_count_(0), 761 deopt_merge_at_join_block_count_(0) { 762 HEnvironment* env = builder->environment(); 763 
first_true_block_ = builder->CreateBasicBlock(env->Copy()); 764 first_false_block_ = builder->CreateBasicBlock(env->Copy()); 765 } 766 767 768 HGraphBuilder::IfBuilder::IfBuilder( 769 HGraphBuilder* builder, 770 HIfContinuation* continuation) 771 : builder_(builder), 772 finished_(false), 773 did_then_(false), 774 did_else_(false), 775 did_else_if_(false), 776 did_and_(false), 777 did_or_(false), 778 captured_(false), 779 needs_compare_(false), 780 pending_merge_block_(false), 781 first_true_block_(NULL), 782 first_false_block_(NULL), 783 split_edge_merge_block_(NULL), 784 merge_at_join_blocks_(NULL), 785 normal_merge_at_join_block_count_(0), 786 deopt_merge_at_join_block_count_(0) { 787 continuation->Continue(&first_true_block_, 788 &first_false_block_); 789 } 790 791 792 HControlInstruction* HGraphBuilder::IfBuilder::AddCompare( 793 HControlInstruction* compare) { 794 ASSERT(did_then_ == did_else_); 795 if (did_else_) { 796 // Handle if-then-elseif 797 did_else_if_ = true; 798 did_else_ = false; 799 did_then_ = false; 800 did_and_ = false; 801 did_or_ = false; 802 pending_merge_block_ = false; 803 split_edge_merge_block_ = NULL; 804 HEnvironment* env = builder_->environment(); 805 first_true_block_ = builder_->CreateBasicBlock(env->Copy()); 806 first_false_block_ = builder_->CreateBasicBlock(env->Copy()); 807 } 808 if (split_edge_merge_block_ != NULL) { 809 HEnvironment* env = first_false_block_->last_environment(); 810 HBasicBlock* split_edge = 811 builder_->CreateBasicBlock(env->Copy()); 812 if (did_or_) { 813 compare->SetSuccessorAt(0, split_edge); 814 compare->SetSuccessorAt(1, first_false_block_); 815 } else { 816 compare->SetSuccessorAt(0, first_true_block_); 817 compare->SetSuccessorAt(1, split_edge); 818 } 819 builder_->GotoNoSimulate(split_edge, split_edge_merge_block_); 820 } else { 821 compare->SetSuccessorAt(0, first_true_block_); 822 compare->SetSuccessorAt(1, first_false_block_); 823 } 824 builder_->FinishCurrentBlock(compare); 825 needs_compare_ = 
false; 826 return compare; 827 } 828 829 830 void HGraphBuilder::IfBuilder::Or() { 831 ASSERT(!needs_compare_); 832 ASSERT(!did_and_); 833 did_or_ = true; 834 HEnvironment* env = first_false_block_->last_environment(); 835 if (split_edge_merge_block_ == NULL) { 836 split_edge_merge_block_ = 837 builder_->CreateBasicBlock(env->Copy()); 838 builder_->GotoNoSimulate(first_true_block_, split_edge_merge_block_); 839 first_true_block_ = split_edge_merge_block_; 840 } 841 builder_->set_current_block(first_false_block_); 842 first_false_block_ = builder_->CreateBasicBlock(env->Copy()); 843 } 844 845 846 void HGraphBuilder::IfBuilder::And() { 847 ASSERT(!needs_compare_); 848 ASSERT(!did_or_); 849 did_and_ = true; 850 HEnvironment* env = first_false_block_->last_environment(); 851 if (split_edge_merge_block_ == NULL) { 852 split_edge_merge_block_ = builder_->CreateBasicBlock(env->Copy()); 853 builder_->GotoNoSimulate(first_false_block_, split_edge_merge_block_); 854 first_false_block_ = split_edge_merge_block_; 855 } 856 builder_->set_current_block(first_true_block_); 857 first_true_block_ = builder_->CreateBasicBlock(env->Copy()); 858 } 859 860 861 void HGraphBuilder::IfBuilder::CaptureContinuation( 862 HIfContinuation* continuation) { 863 ASSERT(!did_else_if_); 864 ASSERT(!finished_); 865 ASSERT(!captured_); 866 867 HBasicBlock* true_block = NULL; 868 HBasicBlock* false_block = NULL; 869 Finish(&true_block, &false_block); 870 ASSERT(true_block != NULL); 871 ASSERT(false_block != NULL); 872 continuation->Capture(true_block, false_block); 873 captured_ = true; 874 builder_->set_current_block(NULL); 875 End(); 876 } 877 878 879 void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) { 880 ASSERT(!did_else_if_); 881 ASSERT(!finished_); 882 ASSERT(!captured_); 883 HBasicBlock* true_block = NULL; 884 HBasicBlock* false_block = NULL; 885 Finish(&true_block, &false_block); 886 merge_at_join_blocks_ = NULL; 887 if (true_block != NULL && 
!true_block->IsFinished()) { 888 ASSERT(continuation->IsTrueReachable()); 889 builder_->GotoNoSimulate(true_block, continuation->true_branch()); 890 } 891 if (false_block != NULL && !false_block->IsFinished()) { 892 ASSERT(continuation->IsFalseReachable()); 893 builder_->GotoNoSimulate(false_block, continuation->false_branch()); 894 } 895 captured_ = true; 896 End(); 897 } 898 899 900 void HGraphBuilder::IfBuilder::Then() { 901 ASSERT(!captured_); 902 ASSERT(!finished_); 903 did_then_ = true; 904 if (needs_compare_) { 905 // Handle if's without any expressions, they jump directly to the "else" 906 // branch. However, we must pretend that the "then" branch is reachable, 907 // so that the graph builder visits it and sees any live range extending 908 // constructs within it. 909 HConstant* constant_false = builder_->graph()->GetConstantFalse(); 910 ToBooleanStub::Types boolean_type = ToBooleanStub::Types(); 911 boolean_type.Add(ToBooleanStub::BOOLEAN); 912 HBranch* branch = builder()->New<HBranch>( 913 constant_false, boolean_type, first_true_block_, first_false_block_); 914 builder_->FinishCurrentBlock(branch); 915 } 916 builder_->set_current_block(first_true_block_); 917 pending_merge_block_ = true; 918 } 919 920 921 void HGraphBuilder::IfBuilder::Else() { 922 ASSERT(did_then_); 923 ASSERT(!captured_); 924 ASSERT(!finished_); 925 AddMergeAtJoinBlock(false); 926 builder_->set_current_block(first_false_block_); 927 pending_merge_block_ = true; 928 did_else_ = true; 929 } 930 931 932 void HGraphBuilder::IfBuilder::Deopt(const char* reason) { 933 ASSERT(did_then_); 934 builder_->Add<HDeoptimize>(reason, Deoptimizer::EAGER); 935 AddMergeAtJoinBlock(true); 936 } 937 938 939 void HGraphBuilder::IfBuilder::Return(HValue* value) { 940 HValue* parameter_count = builder_->graph()->GetConstantMinus1(); 941 builder_->FinishExitCurrentBlock( 942 builder_->New<HReturn>(value, parameter_count)); 943 AddMergeAtJoinBlock(false); 944 } 945 946 947 void 
HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) { 948 if (!pending_merge_block_) return; 949 HBasicBlock* block = builder_->current_block(); 950 ASSERT(block == NULL || !block->IsFinished()); 951 MergeAtJoinBlock* record = 952 new(builder_->zone()) MergeAtJoinBlock(block, deopt, 953 merge_at_join_blocks_); 954 merge_at_join_blocks_ = record; 955 if (block != NULL) { 956 ASSERT(block->end() == NULL); 957 if (deopt) { 958 normal_merge_at_join_block_count_++; 959 } else { 960 deopt_merge_at_join_block_count_++; 961 } 962 } 963 builder_->set_current_block(NULL); 964 pending_merge_block_ = false; 965 } 966 967 968 void HGraphBuilder::IfBuilder::Finish() { 969 ASSERT(!finished_); 970 if (!did_then_) { 971 Then(); 972 } 973 AddMergeAtJoinBlock(false); 974 if (!did_else_) { 975 Else(); 976 AddMergeAtJoinBlock(false); 977 } 978 finished_ = true; 979 } 980 981 982 void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation, 983 HBasicBlock** else_continuation) { 984 Finish(); 985 986 MergeAtJoinBlock* else_record = merge_at_join_blocks_; 987 if (else_continuation != NULL) { 988 *else_continuation = else_record->block_; 989 } 990 MergeAtJoinBlock* then_record = else_record->next_; 991 if (then_continuation != NULL) { 992 *then_continuation = then_record->block_; 993 } 994 ASSERT(then_record->next_ == NULL); 995 } 996 997 998 void HGraphBuilder::IfBuilder::End() { 999 if (captured_) return; 1000 Finish(); 1001 1002 int total_merged_blocks = normal_merge_at_join_block_count_ + 1003 deopt_merge_at_join_block_count_; 1004 ASSERT(total_merged_blocks >= 1); 1005 HBasicBlock* merge_block = total_merged_blocks == 1 1006 ? NULL : builder_->graph()->CreateBasicBlock(); 1007 1008 // Merge non-deopt blocks first to ensure environment has right size for 1009 // padding. 
  MergeAtJoinBlock* current = merge_at_join_blocks_;
  while (current != NULL) {
    if (!current->deopt_ && current->block_ != NULL) {
      // If there is only one block that makes it through to the end of the
      // if, then just set it as the current block and continue rather than
      // creating an unnecessary merge block.
      if (total_merged_blocks == 1) {
        builder_->set_current_block(current->block_);
        return;
      }
      builder_->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }

  // Merge deopt blocks, padding when necessary.
  current = merge_at_join_blocks_;
  while (current != NULL) {
    if (current->deopt_ && current->block_ != NULL) {
      current->block_->FinishExit(
          HAbnormalExit::New(builder_->zone(), NULL),
          HSourcePosition::Unknown());
    }
    current = current->next_;
  }
  builder_->set_current_block(merge_block);
}


// Loop builder without an explicit increment amount: the induction variable
// is advanced by the constant 1.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
                                        HValue* context,
                                        LoopBuilder::Direction direction)
    : builder_(builder),
      context_(context),
      direction_(direction),
      finished_(false) {
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
  increment_amount_ = builder_->graph()->GetConstant1();
}


// Loop builder with a caller-supplied increment amount.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
                                        HValue* context,
                                        LoopBuilder::Direction direction,
                                        HValue* increment_amount)
    : builder_(builder),
      context_(context),
      direction_(direction),
      finished_(false) {
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
  increment_amount_ = increment_amount;
}


// Emits the loop header: a phi for the induction variable, the numeric
// compare against |terminating|, and the branch to body/exit blocks.
// Returns the value visible inside the body — for pre-increment/decrement
// loops that is the already-advanced value, otherwise the phi itself.
HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  env->Push(initial);
  builder_->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  // Remove the phi from the expression stack
  body_env->Pop();
  exit_env->Pop();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);

  builder_->set_current_block(header_block_);
  env->Pop();
  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
          phi_, terminating, token, body_block_, exit_block_));

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    // Pre-increment/decrement loops always advance by 1 (increment_amount_
    // only applies to the post-increment/decrement variants in EndBody()).
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, one);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}


void HGraphBuilder::LoopBuilder::Break() {
  if (exit_trampoline_block_ == NULL) {
    // It's the first time we saw a break.
    HEnvironment* env = exit_block_->last_environment()->Copy();
    exit_trampoline_block_ = builder_->CreateBasicBlock(env);
    builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
  }

  builder_->GotoNoSimulate(exit_trampoline_block_);
  builder_->set_current_block(NULL);
}


void HGraphBuilder::LoopBuilder::EndBody() {
  ASSERT(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    if (direction_ == kPostIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, increment_amount_);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, increment_amount_);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  // Push the new increment value on the expression stack to merge into the phi.
  builder_->environment()->Push(increment_);
  HBasicBlock* last_block = builder_->current_block();
  builder_->GotoNoSimulate(last_block, header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  if (exit_trampoline_block_ != NULL) {
    builder_->set_current_block(exit_trampoline_block_);
  } else {
    builder_->set_current_block(exit_block_);
  }
  finished_ = true;
}


HGraph* HGraphBuilder::CreateGraph() {
  graph_ = new(zone()) HGraph(info_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueness();
  return graph_;
}


HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  ASSERT(current_block() != NULL);
  ASSERT(!FLAG_hydrogen_track_positions ||
         !position_.IsUnknown() ||
         !info_->IsOptimizing());
  current_block()->AddInstruction(instr, source_position());
  if
(graph()->IsInsideNoSideEffectsScope()) { 1169 instr->SetFlag(HValue::kHasNoObservableSideEffects); 1170 } 1171 return instr; 1172 } 1173 1174 1175 void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) { 1176 ASSERT(!FLAG_hydrogen_track_positions || 1177 !info_->IsOptimizing() || 1178 !position_.IsUnknown()); 1179 current_block()->Finish(last, source_position()); 1180 if (last->IsReturn() || last->IsAbnormalExit()) { 1181 set_current_block(NULL); 1182 } 1183 } 1184 1185 1186 void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) { 1187 ASSERT(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() || 1188 !position_.IsUnknown()); 1189 current_block()->FinishExit(instruction, source_position()); 1190 if (instruction->IsReturn() || instruction->IsAbnormalExit()) { 1191 set_current_block(NULL); 1192 } 1193 } 1194 1195 1196 void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) { 1197 if (FLAG_native_code_counters && counter->Enabled()) { 1198 HValue* reference = Add<HConstant>(ExternalReference(counter)); 1199 HValue* old_value = Add<HLoadNamedField>( 1200 reference, static_cast<HValue*>(NULL), HObjectAccess::ForCounter()); 1201 HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1()); 1202 new_value->ClearFlag(HValue::kCanOverflow); // Ignore counter overflow 1203 Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(), 1204 new_value, STORE_TO_INITIALIZED_ENTRY); 1205 } 1206 } 1207 1208 1209 void HGraphBuilder::AddSimulate(BailoutId id, 1210 RemovableSimulate removable) { 1211 ASSERT(current_block() != NULL); 1212 ASSERT(!graph()->IsInsideNoSideEffectsScope()); 1213 current_block()->AddNewSimulate(id, source_position(), removable); 1214 } 1215 1216 1217 HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) { 1218 HBasicBlock* b = graph()->CreateBasicBlock(); 1219 b->SetInitialEnvironment(env); 1220 return b; 1221 } 1222 1223 1224 HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() { 1225 
HBasicBlock* header = graph()->CreateBasicBlock(); 1226 HEnvironment* entry_env = environment()->CopyAsLoopHeader(header); 1227 header->SetInitialEnvironment(entry_env); 1228 header->AttachLoopInformation(); 1229 return header; 1230 } 1231 1232 1233 HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) { 1234 HValue* map = Add<HLoadNamedField>(object, static_cast<HValue*>(NULL), 1235 HObjectAccess::ForMap()); 1236 1237 HValue* bit_field2 = Add<HLoadNamedField>(map, static_cast<HValue*>(NULL), 1238 HObjectAccess::ForMapBitField2()); 1239 return BuildDecodeField<Map::ElementsKindBits>(bit_field2); 1240 } 1241 1242 1243 HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) { 1244 if (obj->type().IsHeapObject()) return obj; 1245 return Add<HCheckHeapObject>(obj); 1246 } 1247 1248 1249 void HGraphBuilder::FinishExitWithHardDeoptimization(const char* reason) { 1250 Add<HDeoptimize>(reason, Deoptimizer::EAGER); 1251 FinishExitCurrentBlock(New<HAbnormalExit>()); 1252 } 1253 1254 1255 HValue* HGraphBuilder::BuildCheckString(HValue* string) { 1256 if (!string->type().IsString()) { 1257 ASSERT(!string->IsConstant() || 1258 !HConstant::cast(string)->HasStringValue()); 1259 BuildCheckHeapObject(string); 1260 return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING); 1261 } 1262 return string; 1263 } 1264 1265 1266 HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) { 1267 if (object->type().IsJSObject()) return object; 1268 if (function->IsConstant() && 1269 HConstant::cast(function)->handle(isolate())->IsJSFunction()) { 1270 Handle<JSFunction> f = Handle<JSFunction>::cast( 1271 HConstant::cast(function)->handle(isolate())); 1272 SharedFunctionInfo* shared = f->shared(); 1273 if (shared->strict_mode() == STRICT || shared->native()) return object; 1274 } 1275 return Add<HWrapReceiver>(object, function); 1276 } 1277 1278 1279 HValue* HGraphBuilder::BuildCheckForCapacityGrow( 1280 HValue* object, 1281 HValue* elements, 1282 ElementsKind 
kind,
    HValue* length,
    HValue* key,
    bool is_js_array,
    PropertyAccessType access_type) {
  IfBuilder length_checker(this);

  // Growth path is taken when key >= length (holey kinds) or key == length
  // (packed kinds).
  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
  length_checker.If<HCompareNumericAndBranch>(key, length, token);

  length_checker.Then();

  HValue* current_capacity = AddLoadFixedArrayLength(elements);

  IfBuilder capacity_checker(this);

  capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
                                                Token::GTE);
  capacity_checker.Then();

  // Bounds check fails (deopts) if the store would leave a gap of more than
  // kMaxGap elements beyond the current capacity.
  HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
  HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap);

  Add<HBoundsCheck>(key, max_capacity);

  HValue* new_capacity = BuildNewElementsCapacity(key);
  HValue* new_elements = BuildGrowElementsCapacity(object, elements,
                                                   kind, kind, length,
                                                   new_capacity);

  environment()->Push(new_elements);
  capacity_checker.Else();

  environment()->Push(elements);
  capacity_checker.End();

  if (is_js_array) {
    HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
    new_length->ClearFlag(HValue::kCanOverflow);

    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
                          new_length);
  }

  if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
    HValue* checked_elements = environment()->Top();

    // Write zero to ensure that the new element is initialized with some smi.
    Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), kind);
  }

  length_checker.Else();
  Add<HBoundsCheck>(key, length);

  environment()->Push(elements);
  length_checker.End();

  // Result: the (possibly grown) elements backing store.
  return environment()->Pop();
}


// If |elements| is a copy-on-write array, replaces it with a writable copy
// and returns the new backing store; otherwise returns |elements| unchanged.
HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
                                                HValue* elements,
                                                ElementsKind kind,
                                                HValue* length) {
  Factory* factory = isolate()->factory();

  IfBuilder cow_checker(this);

  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  cow_checker.Then();

  HValue* capacity = AddLoadFixedArrayLength(elements);

  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
                                                   kind, length, capacity);

  environment()->Push(new_elements);

  cow_checker.Else();

  environment()->Push(elements);

  cow_checker.End();

  return environment()->Pop();
}


void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
                                                HValue* map,
                                                ElementsKind from_kind,
                                                ElementsKind to_kind,
                                                bool is_jsarray) {
  ASSERT(!IsFastHoleyElementsKind(from_kind) ||
         IsFastHoleyElementsKind(to_kind));

  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    Add<HTrapAllocationMemento>(object);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    // The backing store representation changes, so the elements must be
    // copied — unless the array is empty.
    HInstruction* elements = AddLoadElements(object);

    HInstruction* empty_fixed_array = Add<HConstant>(
        isolate()->factory()->empty_fixed_array());

    IfBuilder if_builder(this);

    if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);

    if_builder.Then();

    HInstruction* elements_length = AddLoadFixedArrayLength(elements);

    HInstruction* array_length = is_jsarray
        ?
Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                             HObjectAccess::ForArrayLength(from_kind))
        : elements_length;

    BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
                              array_length, elements_length);

    if_builder.End();
  }

  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
}


// Checks that |receiver| is a JSObject and that none of the bits selected by
// |bit_field_mask| are set in its map's bit field.
void HGraphBuilder::BuildJSObjectCheck(HValue* receiver,
                                       int bit_field_mask) {
  // Check that the object isn't a smi.
  Add<HCheckHeapObject>(receiver);

  // Get the map of the receiver.
  HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                     HObjectAccess::ForMap());

  // Check the instance type and if an access check is needed, this can be
  // done with a single load, since both bytes are adjacent in the map.
  HObjectAccess access(HObjectAccess::ForMapInstanceTypeAndBitField());
  HValue* instance_type_and_bit_field =
      Add<HLoadNamedField>(map, static_cast<HValue*>(NULL), access);

  HValue* mask = Add<HConstant>(0x00FF | (bit_field_mask << 8));
  HValue* and_result = AddUncasted<HBitwise>(Token::BIT_AND,
                                             instance_type_and_bit_field,
                                             mask);
  HValue* sub_result = AddUncasted<HSub>(and_result,
                                         Add<HConstant>(JS_OBJECT_TYPE));
  // A single unsigned bounds check covers both conditions: a value below
  // JS_OBJECT_TYPE wraps around, and any masked bit_field bit pushes the
  // value to 0x100 or beyond.
  Add<HBoundsCheck>(sub_result, Add<HConstant>(0x100 - JS_OBJECT_TYPE));
}


void HGraphBuilder::BuildKeyedIndexCheck(HValue* key,
                                         HIfContinuation* join_continuation) {
  // The sometimes unintuitively backward ordering of the ifs below is
  // convoluted, but necessary. All of the paths must guarantee that the
  // if-true of the continuation returns a smi element index and the if-false
  // of the continuation returns either a symbol or a unique string key. All
  // other object types cause a deopt to fall back to the runtime.

  IfBuilder key_smi_if(this);
  key_smi_if.If<HIsSmiAndBranch>(key);
  key_smi_if.Then();
  {
    Push(key);  // Nothing to do, just continue to true of continuation.
  }
  key_smi_if.Else();
  {
    HValue* map = Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                                       HObjectAccess::ForMap());
    HValue* instance_type =
        Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                             HObjectAccess::ForMapInstanceType());

    // Non-unique string, check for a string with a hash code that is actually
    // an index.
    STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
    IfBuilder not_string_or_name_if(this);
    not_string_or_name_if.If<HCompareNumericAndBranch>(
        instance_type,
        Add<HConstant>(LAST_UNIQUE_NAME_TYPE),
        Token::GT);

    not_string_or_name_if.Then();
    {
      // Non-smi, non-Name, non-String: Try to convert to smi in case of
      // HeapNumber.
      // TODO(danno): This could call some variant of ToString
      Push(AddUncasted<HForceRepresentation>(key, Representation::Smi()));
    }
    not_string_or_name_if.Else();
    {
      // String or Name: check explicitly for Name, they can short-circuit
      // directly to unique non-index key path.
      IfBuilder not_symbol_if(this);
      not_symbol_if.If<HCompareNumericAndBranch>(
          instance_type,
          Add<HConstant>(SYMBOL_TYPE),
          Token::NE);

      not_symbol_if.Then();
      {
        // String: check whether the String is a String of an index. If it is,
        // extract the index value from the hash.
        HValue* hash =
            Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                                 HObjectAccess::ForNameHashField());
        HValue* not_index_mask = Add<HConstant>(static_cast<int>(
            String::kContainsCachedArrayIndexMask));

        HValue* not_index_test = AddUncasted<HBitwise>(
            Token::BIT_AND, hash, not_index_mask);

        IfBuilder string_index_if(this);
        string_index_if.If<HCompareNumericAndBranch>(not_index_test,
                                                     graph()->GetConstant0(),
                                                     Token::EQ);
        string_index_if.Then();
        {
          // String with index in hash: extract string and merge to index path.
          Push(BuildDecodeField<String::ArrayIndexValueBits>(hash));
        }
        string_index_if.Else();
        {
          // Key is a non-index String, check for uniqueness/internalization.
          // If it's not, deopt.
          HValue* not_internalized_bit = AddUncasted<HBitwise>(
              Token::BIT_AND,
              instance_type,
              Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
          DeoptimizeIf<HCompareNumericAndBranch>(
              not_internalized_bit,
              graph()->GetConstant0(),
              Token::NE,
              "BuildKeyedIndexCheck: string isn't internalized");
          // Key guaranteed to be a unique string.
          Push(key);
        }
        string_index_if.JoinContinuation(join_continuation);
      }
      not_symbol_if.Else();
      {
        Push(key);  // Key is symbol
      }
      not_symbol_if.JoinContinuation(join_continuation);
    }
    not_string_or_name_if.JoinContinuation(join_continuation);
  }
  key_smi_if.JoinContinuation(join_continuation);
}


void HGraphBuilder::BuildNonGlobalObjectCheck(HValue* receiver) {
  // Get the instance type of the receiver, and make sure that it is
  // not one of the global object types.
  HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                     HObjectAccess::ForMap());
  HValue* instance_type =
      Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMapInstanceType());
  // The two global object types are adjacent, so a single range check covers
  // both of them.
  STATIC_ASSERT(JS_BUILTINS_OBJECT_TYPE == JS_GLOBAL_OBJECT_TYPE + 1);
  HValue* min_global_type = Add<HConstant>(JS_GLOBAL_OBJECT_TYPE);
  HValue* max_global_type = Add<HConstant>(JS_BUILTINS_OBJECT_TYPE);

  IfBuilder if_global_object(this);
  if_global_object.If<HCompareNumericAndBranch>(instance_type,
                                                max_global_type,
                                                Token::LTE);
  if_global_object.And();
  if_global_object.If<HCompareNumericAndBranch>(instance_type,
                                                min_global_type,
                                                Token::GTE);
  if_global_object.ThenDeopt("receiver was a global object");
  if_global_object.End();
}


// Captures a continuation whose true branch means the object's properties
// are stored in a dictionary (hash table), and whose false branch means they
// are stored in fast mode.
void HGraphBuilder::BuildTestForDictionaryProperties(
    HValue* object,
    HIfContinuation* continuation) {
  HValue* properties = Add<HLoadNamedField>(
      object, static_cast<HValue*>(NULL),
      HObjectAccess::ForPropertiesPointer());
  HValue* properties_map =
      Add<HLoadNamedField>(properties, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMap());
  HValue* hash_map = Add<HLoadRoot>(Heap::kHashTableMapRootIndex);
  IfBuilder builder(this);
  builder.If<HCompareObjectEqAndBranch>(properties_map, hash_map);
  builder.CaptureContinuation(continuation);
}


HValue* HGraphBuilder::BuildKeyedLookupCacheHash(HValue* object,
                                                 HValue* key) {
  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  HValue* object_map =
      Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMapAsInteger32());
  HValue* shifted_map = AddUncasted<HShr>(
      object_map, Add<HConstant>(KeyedLookupCache::kMapHashShift));
  HValue* string_hash =
      Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                           HObjectAccess::ForStringHashField());
  HValue* shifted_hash = AddUncasted<HShr>(
      string_hash, Add<HConstant>(String::kHashShift));
  HValue* xor_result = AddUncasted<HBitwise>(Token::BIT_XOR, shifted_map,
                                             shifted_hash);
  int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
  return AddUncasted<HBitwise>(Token::BIT_AND, xor_result,
                               Add<HConstant>(mask));
}


// Emits one probe of a seeded number dictionary lookup; recurses to emit the
// remaining probes. Returns NULL once kNumberDictionaryProbes probes have
// been emitted (callers then deopt), otherwise the loaded value.
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoadHelper(
    HValue* elements,
    HValue* key,
    HValue* hash,
    HValue* mask,
    int current_probe) {
  if (current_probe == kNumberDictionaryProbes) {
    return NULL;
  }

  int32_t offset = SeededNumberDictionary::GetProbeOffset(current_probe);
  HValue* raw_index = (current_probe == 0)
      ? hash
      : AddUncasted<HAdd>(hash, Add<HConstant>(offset));
  raw_index = AddUncasted<HBitwise>(Token::BIT_AND, raw_index, mask);
  int32_t entry_size = SeededNumberDictionary::kEntrySize;
  raw_index = AddUncasted<HMul>(raw_index, Add<HConstant>(entry_size));
  raw_index->ClearFlag(HValue::kCanOverflow);

  int32_t base_offset = SeededNumberDictionary::kElementsStartIndex;
  HValue* key_index = AddUncasted<HAdd>(raw_index, Add<HConstant>(base_offset));
  key_index->ClearFlag(HValue::kCanOverflow);

  HValue* candidate_key = Add<HLoadKeyed>(elements, key_index,
                                          static_cast<HValue*>(NULL),
                                          FAST_ELEMENTS);

  IfBuilder key_compare(this);
  key_compare.IfNot<HCompareObjectEqAndBranch>(key, candidate_key);
  key_compare.Then();
  {
    // Key at the current probe doesn't match, try at the next probe.
    HValue* result = BuildUncheckedDictionaryElementLoadHelper(
        elements, key, hash, mask, current_probe + 1);
    if (result == NULL) {
      key_compare.Deopt("probes exhausted in keyed load dictionary lookup");
      result = graph()->GetConstantUndefined();
    } else {
      Push(result);
    }
  }
  key_compare.Else();
  {
    // Key at current probe matches. Details must be zero, otherwise the
    // dictionary element requires special handling.
    HValue* details_index = AddUncasted<HAdd>(
        raw_index, Add<HConstant>(base_offset + 2));
    details_index->ClearFlag(HValue::kCanOverflow);

    HValue* details = Add<HLoadKeyed>(elements, details_index,
                                      static_cast<HValue*>(NULL),
                                      FAST_ELEMENTS);
    IfBuilder details_compare(this);
    details_compare.If<HCompareNumericAndBranch>(details,
                                                 graph()->GetConstant0(),
                                                 Token::NE);
    details_compare.ThenDeopt("keyed load dictionary element not fast case");

    details_compare.Else();
    {
      // Key matches and details are zero --> fast case. Load and return the
      // value.
      HValue* result_index = AddUncasted<HAdd>(
          raw_index, Add<HConstant>(base_offset + 1));
      result_index->ClearFlag(HValue::kCanOverflow);

      Push(Add<HLoadKeyed>(elements, result_index,
                           static_cast<HValue*>(NULL),
                           FAST_ELEMENTS));
    }
    details_compare.End();
  }
  key_compare.End();

  return Pop();
}


// Scrambles an element index into a dictionary hash (integer hash mixing).
HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
  // Seed is truncated to 32 bits; signedness is irrelevant for the bit
  // mixing below.
  int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
  HValue* seed = Add<HConstant>(seed_value);
  HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);

  // hash = ~hash + (hash << 15);
  HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
  HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
                                           graph()->GetConstantMinus1());
  hash = AddUncasted<HAdd>(shifted_hash, not_hash);

  // hash = hash ^ (hash >> 12);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash + (hash << 2);
  shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
  hash = AddUncasted<HAdd>(hash, shifted_hash);

  // hash = hash ^ (hash >> 4);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash * 2057;
  hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
  hash->ClearFlag(HValue::kCanOverflow);

  // hash = hash ^ (hash >> 16);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
  return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
}


HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
                                                           HValue* elements,
                                                           HValue* key,
                                                           HValue* hash) {
  HValue* capacity = Add<HLoadKeyed>(
      elements,
      Add<HConstant>(NameDictionary::kCapacityIndex),
      static_cast<HValue*>(NULL),
      FAST_ELEMENTS);

  // Capacity is a power of two, so capacity - 1 is the probe mask.
  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
  mask->ChangeRepresentation(Representation::Integer32());
  mask->ClearFlag(HValue::kCanOverflow);

  return BuildUncheckedDictionaryElementLoadHelper(elements, key,
                                                   hash, mask, 0);
}


// Allocates and initializes a JSRegExpResult array of the given length with
// the extra 'index' and 'input' fields set.
HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
                                                  HValue* index,
                                                  HValue* input) {
  NoObservableSideEffectsScope scope(this);
  HConstant* max_length = Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  Add<HBoundsCheck>(length, max_length);

  // Generate size calculation code here in order to make it dominate
  // the JSRegExpResult allocation.
  ElementsKind elements_kind = FAST_ELEMENTS;
  HValue* size = BuildCalculateElementsSize(elements_kind, length);

  // Allocate the JSRegExpResult and the FixedArray in one step.
  HValue* result = Add<HAllocate>(
      Add<HConstant>(JSRegExpResult::kSize), HType::JSArray(),
      NOT_TENURED, JS_ARRAY_TYPE);

  // Initialize the JSRegExpResult header.
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  HValue* native_context = Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL),
      HObjectAccess::ForGlobalObjectNativeContext());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForMap(),
      Add<HLoadNamedField>(
          native_context, static_cast<HValue*>(NULL),
          HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length);

  // Initialize the additional fields.
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
      index);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
      input);

  // Allocate and initialize the elements header.
  HAllocate* elements = BuildAllocateElements(elements_kind, size);
  BuildInitializeElementsHeader(elements, elements_kind, length);

  HConstant* size_in_bytes_upper_bound = EstablishElementsAllocationSize(
      elements_kind, max_length->Integer32Value());
  elements->set_size_upper_bound(size_in_bytes_upper_bound);

  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      elements);

  // Initialize the elements contents with undefined.
  BuildFillElementsWithValue(
      elements, elements_kind, graph()->GetConstant0(), length,
      graph()->GetConstantUndefined());

  return result;
}


// Converts a number to a string via the number-string cache, falling back to
// the runtime on a cache miss. Constant numbers are converted at compile
// time.
HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
  NoObservableSideEffectsScope scope(this);

  // Convert constant numbers at compile time.
  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
    Handle<Object> number = HConstant::cast(object)->handle(isolate());
    Handle<String> result = isolate()->factory()->NumberToString(number);
    return Add<HConstant>(result);
  }

  // Create a joinable continuation. True branch means "cache entry found";
  // the key_index is passed along on the expression stack.
  HIfContinuation found(graph()->CreateBasicBlock(),
                        graph()->CreateBasicBlock());

  // Load the number string cache.
  HValue* number_string_cache =
      Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  HValue* mask = AddLoadFixedArrayLength(number_string_cache);
  mask->set_type(HType::Smi());
  mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
  mask = AddUncasted<HSub>(mask, graph()->GetConstant1());

  // Check whether object is a smi.
  IfBuilder if_objectissmi(this);
  if_objectissmi.If<HIsSmiAndBranch>(object);
  if_objectissmi.Then();
  {
    // Compute hash for smi similar to smi_get_hash().
    HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);

    // Load the key.
    HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
    HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
                                  static_cast<HValue*>(NULL),
                                  FAST_ELEMENTS, ALLOW_RETURN_HOLE);

    // Check if object == key.
    IfBuilder if_objectiskey(this);
    if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
    if_objectiskey.Then();
    {
      // Make the key_index available.
      Push(key_index);
    }
    if_objectiskey.JoinContinuation(&found);
  }
  if_objectissmi.Else();
  {
    if (type->Is(Type::SignedSmall())) {
      // Type feedback promised a smi; anything else deopts.
      if_objectissmi.Deopt("Expected smi");
    } else {
      // Check if the object is a heap number.
      IfBuilder if_objectisnumber(this);
      HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
          object, isolate()->factory()->heap_number_map());
      if_objectisnumber.Then();
      {
        // Compute hash for heap number similar to double_get_hash().
        HValue* low = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueLowestBits());
        HValue* high = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueHighestBits());
        HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
        hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);

        // Load the key.
        HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
        HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
                                      static_cast<HValue*>(NULL),
                                      FAST_ELEMENTS, ALLOW_RETURN_HOLE);

        // Check if the key is a heap number and compare it with the object.
        IfBuilder if_keyisnotsmi(this);
        HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
        if_keyisnotsmi.Then();
        {
          IfBuilder if_keyisheapnumber(this);
          if_keyisheapnumber.If<HCompareMap>(
              key, isolate()->factory()->heap_number_map());
          if_keyisheapnumber.Then();
          {
            // Check if values of key and object match.
            IfBuilder if_keyeqobject(this);
            if_keyeqobject.If<HCompareNumericAndBranch>(
                Add<HLoadNamedField>(key, keyisnotsmi,
                                     HObjectAccess::ForHeapNumberValue()),
                Add<HLoadNamedField>(object, objectisnumber,
                                     HObjectAccess::ForHeapNumberValue()),
                Token::EQ);
            if_keyeqobject.Then();
            {
              // Make the key_index available.
              Push(key_index);
            }
            if_keyeqobject.JoinContinuation(&found);
          }
          if_keyisheapnumber.JoinContinuation(&found);
        }
        if_keyisnotsmi.JoinContinuation(&found);
      }
      if_objectisnumber.Else();
      {
        if (type->Is(Type::Number())) {
          // Type feedback promised a number; anything else deopts.
          if_objectisnumber.Deopt("Expected heap number");
        }
      }
      if_objectisnumber.JoinContinuation(&found);
    }
  }
  if_objectissmi.JoinContinuation(&found);

  // Check for cache hit.
  IfBuilder if_found(this, &found);
  if_found.Then();
  {
    // Count number to string operation in native code.
    AddIncrementCounter(isolate()->counters()->number_to_string_native());

    // Load the value in case of cache hit.
    HValue* key_index = Pop();
    HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
    Push(Add<HLoadKeyed>(number_string_cache, value_index,
                         static_cast<HValue*>(NULL),
                         FAST_ELEMENTS, ALLOW_RETURN_HOLE));
  }
  if_found.Else();
  {
    // Cache miss, fallback to runtime.
    Add<HPushArguments>(object);
    Push(Add<HCallRuntime>(
            isolate()->factory()->empty_string(),
            Runtime::FunctionForId(Runtime::kHiddenNumberToStringSkipCache),
            1));
  }
  if_found.End();

  return Pop();
}


// Allocates an object of |object_size| bytes, optionally reserving room for
// and initializing an allocation memento right behind it.
HAllocate* HGraphBuilder::BuildAllocate(
    HValue* object_size,
    HType type,
    InstanceType instance_type,
    HAllocationMode allocation_mode) {
  // Compute the effective allocation size.
  HValue* size = object_size;
  if (allocation_mode.CreateAllocationMementos()) {
    // Reserve room for the AllocationMemento that is appended directly
    // after the object below.
    size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
    size->ClearFlag(HValue::kCanOverflow);
  }

  // Perform the actual allocation.
  HAllocate* object = Add<HAllocate>(
      size, type, allocation_mode.GetPretenureMode(),
      instance_type, allocation_mode.feedback_site());

  // Setup the allocation memento.
  if (allocation_mode.CreateAllocationMementos()) {
    BuildCreateAllocationMemento(
        object, object_size, allocation_mode.current_site());
  }

  return object;
}


// Adds the two string lengths and guards the sum against
// String::kMaxLength via a bounds check.
HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
                                             HValue* right_length) {
  // Compute the combined string length and check against max string length.
  HValue* length = AddUncasted<HAdd>(left_length, right_length);
  // Check that length <= kMaxLength <=> length < MaxLength + 1.
  HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
  Add<HBoundsCheck>(length, max_length);
  return length;
}


// Allocates and initializes a ConsString of the given length whose first
// part is |left| and second part is |right|. Picks the one-byte or
// two-byte cons string map based on the operands' instance types.
HValue* HGraphBuilder::BuildCreateConsString(
    HValue* length,
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string instance types.
  HInstruction* left_instance_type = AddLoadStringInstanceType(left);
  HInstruction* right_instance_type = AddLoadStringInstanceType(right);

  // Allocate the cons string object. HAllocate does not care whether we
  // pass CONS_STRING_TYPE or CONS_ASCII_STRING_TYPE here, so we just use
  // CONS_STRING_TYPE here. Below we decide whether the cons string is
  // one-byte or two-byte and set the appropriate map.
  ASSERT(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
                                            CONS_ASCII_STRING_TYPE));
  HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
                                    HType::String(), CONS_STRING_TYPE,
                                    allocation_mode);

  // Compute intersection and difference of instance types.
  HValue* anded_instance_types = AddUncasted<HBitwise>(
      Token::BIT_AND, left_instance_type, right_instance_type);
  HValue* xored_instance_types = AddUncasted<HBitwise>(
      Token::BIT_XOR, left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
  IfBuilder if_onebyte(this);
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kOneByteDataHintMask != 0);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, anded_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kStringEncodingMask | kOneByteDataHintMask))),
      graph()->GetConstant0(), Token::NE);
  if_onebyte.Or();
  STATIC_ASSERT(kOneByteStringTag != 0 &&
                kOneByteDataHintTag != 0 &&
                kOneByteDataHintTag != kOneByteStringTag);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, xored_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kOneByteStringTag | kOneByteDataHintTag))),
      Add<HConstant>(static_cast<int32_t>(
              kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
  if_onebyte.Then();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_ascii_string_map()));
  }
  if_onebyte.Else();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_string_map()));
  }
  if_onebyte.End();

  // Initialize the cons string fields.
  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                        Add<HConstant>(String::kEmptyHashField));
  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);

  // Count the native string addition.
  AddIncrementCounter(isolate()->counters()->string_add_native());

  return result;
}


// Emits a loop that copies |length| characters from |src| (starting at
// |src_offset|) into |dst| (starting at |dst_offset|), one character per
// iteration. Narrowing copies (two-byte source into one-byte destination)
// are ruled out by the ASSERT below.
void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
                                            HValue* src_offset,
                                            String::Encoding src_encoding,
                                            HValue* dst,
                                            HValue* dst_offset,
                                            String::Encoding dst_encoding,
                                            HValue* length) {
  ASSERT(dst_encoding != String::ONE_BYTE_ENCODING ||
         src_encoding == String::ONE_BYTE_ENCODING);
  LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
  HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
  {
    HValue* src_index = AddUncasted<HAdd>(src_offset, index);
    HValue* value =
        AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
    HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
    Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
  }
  loop.EndBody();
}


// Rounds |header_size + unaligned_size| up to the next object-alignment
// boundary: add the alignment mask, then clear the low bits.
HValue* HGraphBuilder::BuildObjectSizeAlignment(
    HValue* unaligned_size, int header_size) {
  ASSERT((header_size & kObjectAlignmentMask) == 0);
  HValue* size = AddUncasted<HAdd>(
      unaligned_size, Add<HConstant>(static_cast<int32_t>(
          header_size + kObjectAlignmentMask)));
  size->ClearFlag(HValue::kCanOverflow);
  return AddUncasted<HBitwise>(
      Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
          ~kObjectAlignmentMask)));
}


// Concatenates two strings that are both known to be non-empty.
// Strategy: prefer a ConsString when the combined length reaches
// ConsString::kMinLength; otherwise, for same-encoding sequential
// operands, allocate a flat SeqString and copy the characters; in all
// remaining cases fall back to the runtime.
HValue* HGraphBuilder::BuildUncheckedStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Compute the combined string length.
  HValue* length = BuildAddStringLengths(left_length, right_length);

  // Do some manual constant folding here: if one operand's length is a
  // known constant, the ConsString::kMinLength comparison below can be
  // decided at compile time.
  if (left_length->IsConstant()) {
    HConstant* c_left_length = HConstant::cast(left_length);
    ASSERT_NE(0, c_left_length->Integer32Value());
    if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The right string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  } else if (right_length->IsConstant()) {
    HConstant* c_right_length = HConstant::cast(right_length);
    ASSERT_NE(0, c_right_length->Integer32Value());
    if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The left string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  }

  // Check if we should create a cons string.
  IfBuilder if_createcons(this);
  if_createcons.If<HCompareNumericAndBranch>(
      length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
  if_createcons.Then();
  {
    // Create a cons string.
    Push(BuildCreateConsString(length, left, right, allocation_mode));
  }
  if_createcons.Else();
  {
    // Determine the string instance types.
    HValue* left_instance_type = AddLoadStringInstanceType(left);
    HValue* right_instance_type = AddLoadStringInstanceType(right);

    // Compute union and difference of instance types.
    HValue* ored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_OR, left_instance_type, right_instance_type);
    HValue* xored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_XOR, left_instance_type, right_instance_type);

    // Check if both strings have the same encoding and both are
    // sequential.
    IfBuilder if_sameencodingandsequential(this);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, xored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.And();
    STATIC_ASSERT(kSeqStringTag == 0);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, ored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.Then();
    {
      HConstant* string_map =
          Add<HConstant>(isolate()->factory()->string_map());
      HConstant* ascii_string_map =
          Add<HConstant>(isolate()->factory()->ascii_string_map());

      // Determine map and size depending on whether result is one-byte
      // string. Both branches push (size, map) in that order.
      IfBuilder if_onebyte(this);
      STATIC_ASSERT(kOneByteStringTag != 0);
      if_onebyte.If<HCompareNumericAndBranch>(
          AddUncasted<HBitwise>(
              Token::BIT_AND, ored_instance_types,
              Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
          graph()->GetConstant0(), Token::NE);
      if_onebyte.Then();
      {
        // Allocate sequential one-byte string object.
        Push(length);
        Push(ascii_string_map);
      }
      if_onebyte.Else();
      {
        // Allocate sequential two-byte string object.
        // Two bytes per character, hence length << 1.
        HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
        size->ClearFlag(HValue::kCanOverflow);
        size->SetFlag(HValue::kUint32);
        Push(size);
        Push(string_map);
      }
      if_onebyte.End();
      HValue* map = Pop();

      // Calculate the number of bytes needed for the characters in the
      // string while observing object alignment.
      STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
      HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);

      // Allocate the string object. HAllocate does not care whether we pass
      // STRING_TYPE or ASCII_STRING_TYPE here, so we just use STRING_TYPE
      // here.
      HAllocate* result = BuildAllocate(
          size, HType::String(), STRING_TYPE, allocation_mode);
      Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);

      // Initialize the string fields.
      Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                            Add<HConstant>(String::kEmptyHashField));
      Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);

      // Copy characters to the result string.
      IfBuilder if_twobyte(this);
      if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
      if_twobyte.Then();
      {
        // Copy characters from the left string.
        BuildCopySeqStringChars(
            left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
            result, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
            left_length);

        // Copy characters from the right string.
        BuildCopySeqStringChars(
            right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
            result, left_length, String::TWO_BYTE_ENCODING,
            right_length);
      }
      if_twobyte.Else();
      {
        // Copy characters from the left string.
        BuildCopySeqStringChars(
            left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
            result, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
            left_length);

        // Copy characters from the right string.
        BuildCopySeqStringChars(
            right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
            result, left_length, String::ONE_BYTE_ENCODING,
            right_length);
      }
      if_twobyte.End();

      // Count the native string addition.
      AddIncrementCounter(isolate()->counters()->string_add_native());

      // Return the sequential string.
      Push(result);
    }
    if_sameencodingandsequential.Else();
    {
      // Fallback to the runtime to add the two strings.
      Add<HPushArguments>(left, right);
      Push(Add<HCallRuntime>(
          isolate()->factory()->empty_string(),
          Runtime::FunctionForId(Runtime::kHiddenStringAdd),
          2));
    }
    if_sameencodingandsequential.End();
  }
  if_createcons.End();

  return Pop();
}


// Public string-concatenation entry point: short-circuits empty operands
// (returning the other string unchanged) before delegating to
// BuildUncheckedStringAdd for the non-empty/non-empty case.
HValue* HGraphBuilder::BuildStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  NoObservableSideEffectsScope no_effects(this);

  // Determine string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Check if left string is empty.
  IfBuilder if_leftempty(this);
  if_leftempty.If<HCompareNumericAndBranch>(
      left_length, graph()->GetConstant0(), Token::EQ);
  if_leftempty.Then();
  {
    // Count the native string addition.
    AddIncrementCounter(isolate()->counters()->string_add_native());

    // Just return the right string.
    Push(right);
  }
  if_leftempty.Else();
  {
    // Check if right string is empty.
    IfBuilder if_rightempty(this);
    if_rightempty.If<HCompareNumericAndBranch>(
        right_length, graph()->GetConstant0(), Token::EQ);
    if_rightempty.Then();
    {
      // Count the native string addition.
      AddIncrementCounter(isolate()->counters()->string_add_native());

      // Just return the left string.
      Push(left);
    }
    if_rightempty.Else();
    {
      // Add the two non-empty strings.
      Push(BuildUncheckedStringAdd(left, right, allocation_mode));
    }
    if_rightempty.End();
  }
  if_leftempty.End();

  return Pop();
}


// Emits a keyed element load or store for an object whose map has already
// been checked (hence "unchecked" access). Handles external/typed-array
// backing stores, fast smi/object/double elements, copy-on-write arrays,
// and growing stores, depending on |elements_kind| and |store_mode|.
HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
    HValue* checked_object,
    HValue* key,
    HValue* val,
    bool is_js_array,
    ElementsKind elements_kind,
    PropertyAccessType access_type,
    LoadKeyedHoleMode load_mode,
    KeyedAccessStoreMode store_mode) {
  ASSERT((!IsExternalArrayElementsKind(elements_kind) &&
          !IsFixedTypedArrayElementsKind(elements_kind)) ||
         !is_js_array);
  // No GVNFlag is necessary for ElementsKind if there is an explicit
  // dependency on a HElementsTransition instruction. The flag can also be
  // removed if the map to check has FAST_HOLEY_ELEMENTS, since there can be
  // no further ElementsKind transitions. Finally, the dependency can be
  // removed for stores for FAST_ELEMENTS, since a transition to HOLEY
  // elements won't change the generated store code.
  if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
      (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
    checked_object->ClearDependsOnFlag(kElementsKind);
  }

  bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
  bool fast_elements = IsFastObjectElementsKind(elements_kind);
  HValue* elements = AddLoadElements(checked_object);
  if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
    // Guard against storing into a copy-on-write backing store.
    HCheckMaps* check_cow_map = Add<HCheckMaps>(
        elements, isolate()->factory()->fixed_array_map());
    check_cow_map->ClearDependsOnFlag(kElementsKind);
  }
  HInstruction* length = NULL;
  if (is_js_array) {
    length = Add<HLoadNamedField>(
        checked_object->ActualValue(), checked_object,
        HObjectAccess::ForArrayLength(elements_kind));
  } else {
    length = AddLoadFixedArrayLength(elements);
  }
  length->set_type(HType::Smi());
  HValue* checked_key = NULL;
  if (IsExternalArrayElementsKind(elements_kind) ||
      IsFixedTypedArrayElementsKind(elements_kind)) {
    HValue* backing_store;
    if (IsExternalArrayElementsKind(elements_kind)) {
      backing_store = Add<HLoadNamedField>(
          elements, static_cast<HValue*>(NULL),
          HObjectAccess::ForExternalArrayExternalPointer());
    } else {
      backing_store = elements;
    }
    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      // Out-of-bounds keys (key >= length) fall through without accessing
      // the backing store; negative keys deoptimize.
      NoObservableSideEffectsScope no_effects(this);
      IfBuilder length_checker(this);
      length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
      length_checker.Then();
      IfBuilder negative_checker(this);
      HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
          key, graph()->GetConstant0(), Token::GTE);
      negative_checker.Then();
      HInstruction* result = AddElementAccess(
          backing_store, key, val, bounds_check, elements_kind, access_type);
      negative_checker.ElseDeopt("Negative key encountered");
      negative_checker.End();
      length_checker.End();
      return result;
    } else {
      ASSERT(store_mode == STANDARD_STORE);
      checked_key = Add<HBoundsCheck>(key, length);
      return AddElementAccess(
          backing_store, checked_key, val,
          checked_object, elements_kind, access_type);
    }
  }
  ASSERT(fast_smi_only_elements ||
         fast_elements ||
         IsFastDoubleElementsKind(elements_kind));

  // In case val is stored into a fast smi array, assure that the value is a
  // smi before manipulating the backing store. Otherwise the actual store
  // may deopt, leaving the backing store in an invalid state.
  if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
      !val->type().IsSmi()) {
    val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
  }

  if (IsGrowStoreMode(store_mode)) {
    NoObservableSideEffectsScope no_effects(this);
    Representation representation = HStoreKeyed::RequiredValueRepresentation(
        elements_kind, STORE_TO_INITIALIZED_ENTRY);
    val = AddUncasted<HForceRepresentation>(val, representation);
    elements = BuildCheckForCapacityGrow(checked_object, elements,
                                         elements_kind, length, key,
                                         is_js_array, access_type);
    // The grow path performs its own bounds handling; use the raw key.
    checked_key = key;
  } else {
    checked_key = Add<HBoundsCheck>(key, length);

    if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
      if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
        NoObservableSideEffectsScope no_effects(this);
        elements = BuildCopyElementsOnWrite(checked_object, elements,
                                            elements_kind, length);
      } else {
        HCheckMaps* check_cow_map = Add<HCheckMaps>(
            elements, isolate()->factory()->fixed_array_map());
        check_cow_map->ClearDependsOnFlag(kElementsKind);
      }
    }
  }
  return AddElementAccess(elements, checked_key, val, checked_object,
                          elements_kind, access_type, load_mode);
}


// Allocates a JSArray for the given (possibly non-constant) length.
// Constant smi lengths are handled directly; otherwise the length is
// bounds-checked against kInitialMaxFastElementArray and a zero-length
// array gets the preallocated default capacity.
HValue* HGraphBuilder::BuildAllocateArrayFromLength(
    JSArrayBuilder* array_builder,
    HValue* length_argument) {
  if (length_argument->IsConstant() &&
      HConstant::cast(length_argument)->HasSmiValue()) {
    int array_length = HConstant::cast(length_argument)->Integer32Value();
    if (array_length == 0) {
      return array_builder->AllocateEmptyArray();
    } else {
      return array_builder->AllocateArray(length_argument,
                                          array_length,
                                          length_argument);
    }
  }

  HValue* constant_zero = graph()->GetConstant0();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
                                                   max_alloc_length);
  IfBuilder if_builder(this);
  if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
                                          Token::EQ);
  if_builder.Then();
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
  Push(initial_capacity_node);  // capacity
  Push(constant_zero);  // length
  if_builder.Else();
  if (!(top_info()->IsStub()) &&
      IsFastPackedElementsKind(array_builder->kind())) {
    // We'll come back later with better (holey) feedback.
    if_builder.Deopt("Holey array despite packed elements_kind feedback");
  } else {
    Push(checked_length);  // capacity
    Push(checked_length);  // length
  }
  if_builder.End();

  // Figure out total size
  HValue* length = Pop();
  HValue* capacity = Pop();
  return array_builder->AllocateArray(capacity, max_alloc_length, length);
}


// Computes the byte size of a FixedArray/FixedDoubleArray backing store:
// capacity * element size + header.
HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind,
                                                  HValue* capacity) {
  int elements_size = IsFastDoubleElementsKind(kind)
      ?
        kDoubleSize
      : kPointerSize;

  HConstant* elements_size_value = Add<HConstant>(elements_size);
  // NewImul produces an integer multiply that cannot overflow observably
  // here; the flag is cleared explicitly below.
  HInstruction* mul = HMul::NewImul(zone(), context(),
                                    capacity->ActualValue(),
                                    elements_size_value);
  AddInstruction(mul);
  mul->ClearFlag(HValue::kCanOverflow);

  STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);

  HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
  HValue* total_size = AddUncasted<HAdd>(mul, header_size);
  total_size->ClearFlag(HValue::kCanOverflow);
  return total_size;
}


// Allocates a bare JSArray object (no elements), reserving extra space for
// an AllocationMemento when allocation-site tracking is enabled.
HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) {
  int base_size = JSArray::kSize;
  if (mode == TRACK_ALLOCATION_SITE) {
    base_size += AllocationMemento::kSize;
  }
  HConstant* size_in_bytes = Add<HConstant>(base_size);
  return Add<HAllocate>(
      size_in_bytes, HType::JSArray(), NOT_TENURED, JS_OBJECT_TYPE);
}


// Returns the constant byte size of a backing store with the given
// compile-time-known capacity.
HConstant* HGraphBuilder::EstablishElementsAllocationSize(
    ElementsKind kind,
    int capacity) {
  int base_size = IsFastDoubleElementsKind(kind)
      ? FixedDoubleArray::SizeFor(capacity)
      : FixedArray::SizeFor(capacity);

  return Add<HConstant>(base_size);
}


// Allocates an uninitialized elements backing store of |size_in_bytes|.
HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
                                                HValue* size_in_bytes) {
  InstanceType instance_type = IsFastDoubleElementsKind(kind)
      ? FIXED_DOUBLE_ARRAY_TYPE
      : FIXED_ARRAY_TYPE;

  return Add<HAllocate>(size_in_bytes, HType::HeapObject(), NOT_TENURED,
                        instance_type);
}


// Writes the map and length fields of a freshly allocated elements store.
void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
                                                  ElementsKind kind,
                                                  HValue* capacity) {
  Factory* factory = isolate()->factory();
  Handle<Map> map = IsFastDoubleElementsKind(kind)
      ? factory->fixed_double_array_map()
      : factory->fixed_array_map();

  Add<HStoreNamedField>(elements, HObjectAccess::ForMap(), Add<HConstant>(map));
  Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
                        capacity);
}


// Allocates an elements backing store for |capacity| entries and
// initializes its header (map + length).
HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader(
    ElementsKind kind,
    HValue* capacity) {
  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
  capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
  HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity);
  HValue* new_elements = BuildAllocateElements(kind, size_in_bytes);
  BuildInitializeElementsHeader(new_elements, kind, capacity);
  return new_elements;
}


// Initializes all header fields of a freshly allocated JSArray: map,
// properties, elements (empty_fixed_array when |elements| is NULL), length,
// and optionally an AllocationMemento.
void HGraphBuilder::BuildJSArrayHeader(HValue* array,
                                       HValue* array_map,
                                       HValue* elements,
                                       AllocationSiteMode mode,
                                       ElementsKind elements_kind,
                                       HValue* allocation_site_payload,
                                       HValue* length_field) {
  Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);

  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());

  Add<HStoreNamedField>(
      array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array);

  Add<HStoreNamedField>(
      array, HObjectAccess::ForElementsPointer(),
      elements != NULL ? elements : empty_fixed_array);

  Add<HStoreNamedField>(
      array, HObjectAccess::ForArrayLength(elements_kind), length_field);

  if (mode == TRACK_ALLOCATION_SITE) {
    BuildCreateAllocationMemento(
        array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
  }
}


// Emits the actual HLoadKeyed/HStoreKeyed for an element access. Stores
// clamp values for UINT8_CLAMPED kinds; uint32 loads are recorded for the
// later uint32 analysis phase when the flag is enabled.
HInstruction* HGraphBuilder::AddElementAccess(
    HValue* elements,
    HValue* checked_key,
    HValue* val,
    HValue* dependency,
    ElementsKind elements_kind,
    PropertyAccessType access_type,
    LoadKeyedHoleMode load_mode) {
  if (access_type == STORE) {
    ASSERT(val != NULL);
    if (elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
        elements_kind == UINT8_CLAMPED_ELEMENTS) {
      val = Add<HClampToUint8>(val);
    }
    return Add<HStoreKeyed>(elements, checked_key, val, elements_kind,
                            STORE_TO_INITIALIZED_ENTRY);
  }

  ASSERT(access_type == LOAD);
  ASSERT(val == NULL);
  HLoadKeyed* load = Add<HLoadKeyed>(
      elements, checked_key, dependency, elements_kind, load_mode);
  if (FLAG_opt_safe_uint32_operations &&
      (elements_kind == EXTERNAL_UINT32_ELEMENTS ||
       elements_kind == UINT32_ELEMENTS)) {
    graph()->RecordUint32Instruction(load);
  }
  return load;
}


HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object,
                                           HValue* dependency) {
  return Add<HLoadNamedField>(object, dependency, HObjectAccess::ForMap());
}


HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
                                                HValue* dependency) {
  return Add<HLoadNamedField>(
      object, dependency, HObjectAccess::ForElementsPointer());
}


HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
    HValue* array,
    HValue* dependency) {
  return Add<HLoadNamedField>(
      array, dependency, HObjectAccess::ForFixedArrayLength());
}


HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
                                                   ElementsKind kind,
                                                   HValue* dependency) {
  return Add<HLoadNamedField>(
      array, dependency, HObjectAccess::ForArrayLength(kind));
}


// Growth policy for backing stores:
// new_capacity = old_capacity + old_capacity / 2 + 16.
HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
  HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
                                                graph_->GetConstant1());

  HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
  new_capacity->ClearFlag(HValue::kCanOverflow);

  HValue* min_growth = Add<HConstant>(16);

  new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
  new_capacity->ClearFlag(HValue::kCanOverflow);

  return new_capacity;
}


// Allocates a larger backing store, copies the existing elements over,
// and installs the new store on |object|. The bounds check caps
// new_capacity so the allocation fits in a regular heap page.
HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
                                                 HValue* elements,
                                                 ElementsKind kind,
                                                 ElementsKind new_kind,
                                                 HValue* length,
                                                 HValue* new_capacity) {
  Add<HBoundsCheck>(new_capacity, Add<HConstant>(
          (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
          ElementsKindToShiftSize(kind)));

  HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
      new_kind, new_capacity);

  BuildCopyElements(elements, kind, new_elements,
                    new_kind, length, new_capacity);

  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                        new_elements);

  return new_elements;
}


// Stores |value| into elements [from, to). Small constant ranges are
// unrolled; otherwise a backwards loop is emitted. A NULL |to| means
// "up to the array length".
void HGraphBuilder::BuildFillElementsWithValue(HValue* elements,
                                               ElementsKind elements_kind,
                                               HValue* from,
                                               HValue* to,
                                               HValue* value) {
  if (to == NULL) {
    to = AddLoadFixedArrayLength(elements);
  }

  // Special loop unfolding case
  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
                kElementLoopUnrollThreshold);
  int initial_capacity = -1;
  if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
    int constant_from = from->GetInteger32Constant();
    int constant_to = to->GetInteger32Constant();

    if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
      initial_capacity = constant_to;
    }
  }

  // Since we're about to store a hole value, the store instruction below must
  // assume an elements kind that supports heap object values.
  if (IsFastSmiOrObjectElementsKind(elements_kind)) {
    elements_kind = FAST_HOLEY_ELEMENTS;
  }

  if (initial_capacity >= 0) {
    for (int i = 0; i < initial_capacity; i++) {
      HInstruction* key = Add<HConstant>(i);
      Add<HStoreKeyed>(elements, key, value, elements_kind);
    }
  } else {
    // Carefully loop backwards so that the "from" remains live through the
    // loop rather than the "to". This often corresponds to keeping "length"
    // live rather than "capacity", which helps register allocation, since
    // "length" is used more often than "capacity" after filling with holes.
    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(to, from, Token::GT);

    HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
    adjusted_key->ClearFlag(HValue::kCanOverflow);

    Add<HStoreKeyed>(elements, adjusted_key, value, elements_kind);

    builder.EndBody();
  }
}


// Fills [from, to) with the hole: the_hole_value for smi/object kinds,
// the hole NaN bit pattern for double kinds.
void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
                                              ElementsKind elements_kind,
                                              HValue* from,
                                              HValue* to) {
  // Fast elements kinds need to be initialized in case statements below cause
  // a garbage collection.
  Factory* factory = isolate()->factory();

  double nan_double = FixedDoubleArray::hole_nan_as_double();
  HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
      ? Add<HConstant>(factory->the_hole_value())
      : Add<HConstant>(nan_double);

  BuildFillElementsWithValue(elements, elements_kind, from, to, hole);
}


// Copies elements between backing stores, converting hole representation
// when the source and destination kinds differ. Small constant capacities
// are unrolled; otherwise a backwards loop over [0, length) is emitted.
void HGraphBuilder::BuildCopyElements(HValue* from_elements,
                                      ElementsKind from_elements_kind,
                                      HValue* to_elements,
                                      ElementsKind to_elements_kind,
                                      HValue* length,
                                      HValue* capacity) {
  int constant_capacity = -1;
  if (capacity != NULL &&
      capacity->IsConstant() &&
      HConstant::cast(capacity)->HasInteger32Value()) {
    int constant_candidate = HConstant::cast(capacity)->Integer32Value();
    if (constant_candidate <= kElementLoopUnrollThreshold) {
      constant_capacity = constant_candidate;
    }
  }

  bool pre_fill_with_holes =
      IsFastDoubleElementsKind(from_elements_kind) &&
      IsFastObjectElementsKind(to_elements_kind);
  if (pre_fill_with_holes) {
    // If the copy might trigger a GC, make sure that the FixedArray is
    // pre-initialized with holes to make sure that it's always in a
    // consistent state.
    BuildFillElementsWithHole(to_elements, to_elements_kind,
                              graph()->GetConstant0(), NULL);
  }

  if (constant_capacity != -1) {
    // Unroll the loop for small elements kinds.
    for (int i = 0; i < constant_capacity; i++) {
      HValue* key_constant = Add<HConstant>(i);
      HInstruction* value = Add<HLoadKeyed>(from_elements, key_constant,
                                            static_cast<HValue*>(NULL),
                                            from_elements_kind);
      Add<HStoreKeyed>(to_elements, key_constant, value, to_elements_kind);
    }
  } else {
    if (!pre_fill_with_holes &&
        (capacity == NULL || !length->Equals(capacity))) {
      // The tail [length, capacity) is never copied; hole it out so the
      // destination is fully initialized.
      BuildFillElementsWithHole(to_elements, to_elements_kind,
                                length, NULL);
    }

    if (capacity == NULL) {
      capacity = AddLoadFixedArrayLength(to_elements);
    }

    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
                                    Token::GT);

    key = AddUncasted<HSub>(key, graph()->GetConstant1());
    key->ClearFlag(HValue::kCanOverflow);

    HValue* element = Add<HLoadKeyed>(from_elements, key,
                                      static_cast<HValue*>(NULL),
                                      from_elements_kind,
                                      ALLOW_RETURN_HOLE);

    ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
                         IsFastSmiElementsKind(to_elements_kind))
        ? FAST_HOLEY_ELEMENTS : to_elements_kind;

    if (IsHoleyElementsKind(from_elements_kind) &&
        from_elements_kind != to_elements_kind) {
      // Translate the hole representation between element kinds.
      IfBuilder if_hole(this);
      if_hole.If<HCompareHoleAndBranch>(element);
      if_hole.Then();
      HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
          ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
          : graph()->GetConstantHole();
      Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
      if_hole.Else();
      HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
      store->SetFlag(HValue::kAllowUndefinedAsNaN);
      if_hole.End();
    } else {
      HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
      store->SetFlag(HValue::kAllowUndefinedAsNaN);
    }

    builder.EndBody();
  }

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->inlined_copied_elements());
}


// Clones a boilerplate JSArray whose elements are copy-on-write: the new
// array shares the boilerplate's elements pointer instead of copying.
HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
                                                 HValue* allocation_site,
                                                 AllocationSiteMode mode,
                                                 ElementsKind kind) {
  HAllocate* array = AllocateJSArrayObject(mode);

  HValue* map = AddLoadMap(boilerplate);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* length = AddLoadArrayLength(boilerplate, kind);

  BuildJSArrayHeader(array,
                     map,
                     elements,
                     mode,
                     FAST_ELEMENTS,
                     allocation_site,
                     length);
  return array;
}


// Clones a boilerplate JSArray with no elements: the result gets the
// empty fixed array and length zero.
HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
                                                   HValue* allocation_site,
                                                   AllocationSiteMode mode) {
  HAllocate* array = AllocateJSArrayObject(mode);

  HValue* map = AddLoadMap(boilerplate);

  BuildJSArrayHeader(array,
                     map,
                     NULL,  // set elements to empty fixed array
                     mode,
                     FAST_ELEMENTS,
                     allocation_site,
                     graph()->GetConstant0());
  return array;
}


HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
                                                      HValue* allocation_site,
                                                      AllocationSiteMode mode,
                                                      ElementsKind kind) {
  HValue* boilerplate_elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);

  // Generate size calculation code here in order
  // to make it dominate
  // the JSArray allocation.
  HValue* elements_size = BuildCalculateElementsSize(kind, capacity);

  // Create empty JSArray object for now, store elimination should remove
  // redundant initialization of elements and length fields and at the same
  // time the object will be fully prepared for GC if it happens during
  // elements allocation.
  HValue* result = BuildCloneShallowArrayEmpty(
      boilerplate, allocation_site, mode);

  HAllocate* elements = BuildAllocateElements(kind, elements_size);

  // This function implicitly relies on the fact that the
  // FastCloneShallowArrayStub is called only for literals shorter than
  // JSObject::kInitialMaxFastElementArray.
  // Can't add HBoundsCheck here because otherwise the stub will eager a frame.
  HConstant* size_upper_bound = EstablishElementsAllocationSize(
      kind, JSObject::kInitialMaxFastElementArray);
  elements->set_size_upper_bound(size_upper_bound);

  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(), elements);

  // The allocation for the cloned array above causes register pressure on
  // machines with low register counts. Force a reload of the boilerplate
  // elements here to free up a register for the allocation to avoid unnecessary
  // spillage.
  boilerplate_elements = AddLoadElements(boilerplate);
  boilerplate_elements->SetFlag(HValue::kCantBeReplaced);

  // Copy the elements array header, word by word.
  for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
    Add<HStoreNamedField>(elements, access,
                          Add<HLoadNamedField>(boilerplate_elements,
                                               static_cast<HValue*>(NULL),
                                               access));
  }

  // And the result of the length
  HValue* length = AddLoadArrayLength(boilerplate, kind);
  Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind), length);

  BuildCopyElements(boilerplate_elements, kind, elements,
                    kind, length, NULL);
  return result;
}


// Emits a branch capturing whether |value| is null/undefined/undetectable,
// restricted to the cases |type| says are possible. If some nil case is not
// covered by |type|, the else-branch additionally map-checks the value (or
// deopts when the type is polymorphic).
void HGraphBuilder::BuildCompareNil(
    HValue* value,
    Type* type,
    HIfContinuation* continuation) {
  IfBuilder if_nil(this);
  bool some_case_handled = false;
  bool some_case_missing = false;

  if (type->Maybe(Type::Null())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (type->Maybe(Type::Undefined())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HCompareObjectEqAndBranch>(value,
                                         graph()->GetConstantUndefined());
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (type->Maybe(Type::Undetectable())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HIsUndetectableAndBranch>(value);
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (some_case_missing) {
    if_nil.Then();
    if_nil.Else();
    if (type->NumClasses() == 1) {
      BuildCheckHeapObject(value);
      // For ICs, the map checked below is a sentinel map that gets replaced by
      // the monomorphic map when the code is used as a template to generate a
      // new IC. For optimized functions, there is no sentinel map, the map
      // emitted below is the actual monomorphic map.
      Add<HCheckMaps>(value, type->Classes().Current());
    } else {
      if_nil.Deopt("Too many undetectable types");
    }
  }

  if_nil.CaptureContinuation(continuation);
}


// Emits code that appends an AllocationMemento directly after
// |previous_object| (at offset |previous_object_size|) and wires it to
// |allocation_site|. With allocation-site pretenuring enabled, also bumps
// the site's memento-create counter.
void HGraphBuilder::BuildCreateAllocationMemento(
    HValue* previous_object,
    HValue* previous_object_size,
    HValue* allocation_site) {
  ASSERT(allocation_site != NULL);
  HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
      previous_object, previous_object_size, HType::HeapObject());
  AddStoreMapConstant(
      allocation_memento, isolate()->factory()->allocation_memento_map());
  Add<HStoreNamedField>(
      allocation_memento,
      HObjectAccess::ForAllocationMementoSite(),
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    HValue* memento_create_count = Add<HLoadNamedField>(
        allocation_site, static_cast<HValue*>(NULL),
        HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kPretenureCreateCountOffset));
    memento_create_count = AddUncasted<HAdd>(
        memento_create_count, graph()->GetConstant1());
    // This smi value is reset to zero after every gc, overflow isn't a problem
    // since the counter is bounded by the new space size.
    memento_create_count->ClearFlag(HValue::kCanOverflow);
    Add<HStoreNamedField>(
        allocation_site, HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kPretenureCreateCountOffset), memento_create_count);
  }
}


// Loads the native context reachable from |closure|'s function context.
HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
  // Get the global context, then the native context
  HInstruction* context =
      Add<HLoadNamedField>(closure, static_cast<HValue*>(NULL),
                           HObjectAccess::ForFunctionContextPointer());
  HInstruction* global_object = Add<HLoadNamedField>(
      context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
      GlobalObject::kNativeContextOffset);
  return Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL), access);
}


// Loads the native context reachable from the builder's current context.
HInstruction* HGraphBuilder::BuildGetNativeContext() {
  // Get the global context, then the native context
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  return Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL),
      HObjectAccess::ForObservableJSObjectOffset(
          GlobalObject::kNativeContextOffset));
}


// Loads the Array function from the native context.
HInstruction* HGraphBuilder::BuildGetArrayFunction() {
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* index =
      Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
  return Add<HLoadKeyed>(
      native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
}


// Constructs a JSArrayBuilder that tracks an allocation site, unless
// |override_mode| disables allocation-site tracking.
HGraphBuilder::JSArrayBuilder::JSArrayBuilder(
    HGraphBuilder* builder,
    ElementsKind kind,
    HValue* allocation_site_payload,
    HValue* constructor_function,
    AllocationSiteOverrideMode override_mode) :
    builder_(builder),
    kind_(kind),
    allocation_site_payload_(allocation_site_payload),
    constructor_function_(constructor_function) {
  // A constant payload must actually be an AllocationSite.
  ASSERT(!allocation_site_payload->IsConstant() ||
         HConstant::cast(allocation_site_payload)->handle(
             builder_->isolate())->IsAllocationSite());
  mode_ = override_mode == DISABLE_ALLOCATION_SITES
      ? DONT_TRACK_ALLOCATION_SITE
      : AllocationSite::GetMode(kind);
}


// Constructs a JSArrayBuilder without allocation-site tracking.
HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
                                              ElementsKind kind,
                                              HValue* constructor_function) :
    builder_(builder),
    kind_(kind),
    mode_(DONT_TRACK_ALLOCATION_SITE),
    allocation_site_payload_(NULL),
    constructor_function_(constructor_function) {
}


// Emits code that produces the map for the array being built: a constant
// map outside of stubs, the constructor's initial map when kind_ matches,
// or a lookup in the native context's JSArray maps array otherwise.
HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
  if (!builder()->top_info()->IsStub()) {
    // A constant map is fine.
    Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
                    builder()->isolate());
    return builder()->Add<HConstant>(map);
  }

  if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
    // No need for a context lookup if the kind_ matches the initial
    // map, because we can just load the map in that case.
    HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
    return builder()->Add<HLoadNamedField>(
        constructor_function_, static_cast<HValue*>(NULL), access);
  }

  // TODO(mvstanton): we should always have a constructor function if we
  // are creating a stub.
  HInstruction* native_context = constructor_function_ != NULL
      ? builder()->BuildGetNativeContext(constructor_function_)
      : builder()->BuildGetNativeContext();

  HInstruction* index = builder()->Add<HConstant>(
      static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX));

  HInstruction* map_array = builder()->Add<HLoadKeyed>(
      native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);

  HInstruction* kind_index = builder()->Add<HConstant>(kind_);

  return builder()->Add<HLoadKeyed>(
      map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
}


// Emits code that loads the map from the constructor function's
// prototype-or-initial-map slot.
HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
  // Find the map near the constructor function
  HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
  return builder()->Add<HLoadNamedField>(
      constructor_function_, static_cast<HValue*>(NULL), access);
}


// Allocates an array with the default initial capacity and length 0.
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
  HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
  return AllocateArray(capacity,
                       capacity,
                       builder()->graph()->GetConstant0());
}


// Convenience overload: unwraps the constant upper bound to an int.
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    HValue* capacity,
    HConstant* capacity_upper_bound,
    HValue* length_field,
    FillMode fill_mode) {
  return AllocateArray(capacity,
                       capacity_upper_bound->GetInteger32Constant(),
                       length_field,
                       fill_mode);
}


// Allocates the array and records an upper bound for the elements size on
// the elements allocation (exact when capacity is a constant).
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    HValue* capacity,
    int capacity_upper_bound,
    HValue* length_field,
    FillMode fill_mode) {
  // NOTE(review): "elememts" below is a long-standing local-name typo;
  // preserved here because this edit is documentation-only.
  HConstant* elememts_size_upper_bound = capacity->IsInteger32Constant()
      ? HConstant::cast(capacity)
      : builder()->EstablishElementsAllocationSize(kind_, capacity_upper_bound);

  HAllocate* array = AllocateArray(capacity, length_field, fill_mode);
  if (!elements_location_->has_size_upper_bound()) {
    elements_location_->set_size_upper_bound(elememts_size_upper_bound);
  }
  return array;
}


// Core array allocation: emits the JSArray header, allocates and
// initializes the elements store, and optionally fills it with holes.
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    HValue* capacity,
    HValue* length_field,
    FillMode fill_mode) {
  // These HForceRepresentations are because we store these as fields in the
  // objects we construct, and an int32-to-smi HChange could deopt. Accept
  // the deopt possibility now, before allocation occurs.
  capacity =
      builder()->AddUncasted<HForceRepresentation>(capacity,
                                                   Representation::Smi());
  length_field =
      builder()->AddUncasted<HForceRepresentation>(length_field,
                                                   Representation::Smi());

  // Generate size calculation code here in order to make it dominate
  // the JSArray allocation.
  HValue* elements_size =
      builder()->BuildCalculateElementsSize(kind_, capacity);

  // Allocate (dealing with failure appropriately)
  HAllocate* array_object = builder()->AllocateJSArrayObject(mode_);

  // Fill in the fields: map, properties, length
  HValue* map;
  if (allocation_site_payload_ == NULL) {
    map = EmitInternalMapCode();
  } else {
    map = EmitMapCode();
  }

  builder()->BuildJSArrayHeader(array_object,
                                map,
                                NULL, // set elements to empty fixed array
                                mode_,
                                kind_,
                                allocation_site_payload_,
                                length_field);

  // Allocate and initialize the elements
  elements_location_ = builder()->BuildAllocateElements(kind_, elements_size);

  builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);

  // Set the elements
  builder()->Add<HStoreNamedField>(
      array_object, HObjectAccess::ForElementsPointer(), elements_location_);

  if (fill_mode == FILL_WITH_HOLE) {
    builder()->BuildFillElementsWithHole(elements_location_, kind_,
                                         graph()->GetConstant0(), capacity);
  }

  return array_object;
}


// Loads the JS builtin function |builtin| from the builtins object hanging
// off the global object of the current context.
HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin) {
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
      GlobalObject::kBuiltinsOffset);
  HValue* builtins = Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL), access);
  HObjectAccess function_access = HObjectAccess::ForObservableJSObjectOffset(
      JSBuiltinsObject::OffsetOfFunctionWithId(builtin));
  return Add<HLoadNamedField>(
      builtins, static_cast<HValue*>(NULL), function_access);
}


HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
    : HGraphBuilder(info),
      function_state_(NULL),
      initial_function_state_(this, info, NORMAL_RETURN, 0),
      ast_context_(NULL),
      break_scope_(NULL),
      inlined_count_(0),
      globals_(10, info->zone()),
      inline_bailout_(false),
      osr_(new(info->zone()) HOsrBuilder(this)) {
  // This is not initialized in the initializer list because the
  // constructor for the initial state relies on function_state_ == NULL
  // to know it's the initial state.
  function_state_= &initial_function_state_;
  InitializeAstVisitor(info->zone());
  if (FLAG_hydrogen_track_positions) {
    SetSourcePosition(info->shared_info()->start_position());
  }
}


// Joins two branches into a fresh join block; if either branch is NULL the
// other is returned unchanged.
HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
                                                HBasicBlock* second,
                                                BailoutId join_id) {
  if (first == NULL) {
    return second;
  } else if (second == NULL) {
    return first;
  } else {
    HBasicBlock* join_block = graph()->CreateBasicBlock();
    Goto(first, join_block);
    Goto(second, join_block);
    join_block->SetJoinId(join_id);
    return join_block;
  }
}


// Merges the loop-body exit into the continue block of an iteration
// statement, if a continue block exists.
HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
                                                  HBasicBlock* exit_block,
                                                  HBasicBlock* continue_block) {
  if (continue_block != NULL) {
    if (exit_block != NULL) Goto(exit_block, continue_block);
    continue_block->SetJoinId(statement->ContinueId());
    return continue_block;
  }
  return exit_block;
}


// Closes a loop: wires the back edge, post-processes the header, and folds
// the normal successor into the break block when one exists.
HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
                                                HBasicBlock* loop_entry,
                                                HBasicBlock* body_exit,
                                                HBasicBlock* loop_successor,
                                                HBasicBlock* break_block) {
  if (body_exit != NULL) Goto(body_exit, loop_entry);
  loop_entry->PostProcessLoopHeader(statement);
  if (break_block != NULL) {
    if (loop_successor != NULL) Goto(loop_successor, break_block);
    break_block->SetJoinId(statement->ExitId());
    return break_block;
  }
  return loop_successor;
}


// Build a new loop header block and set it as the current block.
HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
  Goto(loop_entry);
  set_current_block(loop_entry);
  return loop_entry;
}


// Builds a loop entry for |statement|, using an OSR entry when on-stack
// replacement targets this statement.
HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
    IterationStatement* statement) {
  HBasicBlock* loop_entry = osr()->HasOsrEntryAt(statement)
      ? osr()->BuildOsrLoopEntry(statement)
      : BuildLoopEntry();
  return loop_entry;
}


// Terminates the block with a control instruction and drops its environment.
void HBasicBlock::FinishExit(HControlInstruction* instruction,
                             HSourcePosition position) {
  Finish(instruction, position);
  ClearEnvironment();
}


HGraph::HGraph(CompilationInfo* info)
    : isolate_(info->isolate()),
      next_block_id_(0),
      entry_block_(NULL),
      blocks_(8, info->zone()),
      values_(16, info->zone()),
      phi_list_(NULL),
      uint32_instructions_(NULL),
      osr_(NULL),
      info_(info),
      zone_(info->zone()),
      is_recursive_(false),
      use_optimistic_licm_(false),
      depends_on_empty_array_proto_elements_(false),
      type_change_checksum_(0),
      maximum_environment_size_(0),
      no_side_effects_scope_count_(0),
      disallow_adding_new_values_(false),
      next_inline_id_(0),
      inlined_functions_(5, info->zone()) {
  // Stubs get a flat environment sized by their interface descriptor;
  // regular functions get an environment derived from scope and closure.
  if (info->IsStub()) {
    HydrogenCodeStub* stub = info->code_stub();
    CodeStubInterfaceDescriptor* descriptor = stub->GetInterfaceDescriptor();
    start_environment_ =
        new(zone_) HEnvironment(zone_, descriptor->environment_length());
  } else {
    TraceInlinedFunction(info->shared_info(), HSourcePosition::Unknown());
    start_environment_ =
        new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
  }
  start_environment_->set_ast_id(BailoutId::FunctionEntry());
  entry_block_ = CreateBasicBlock();
  entry_block_->SetInitialEnvironment(start_environment_);
}


// Creates a new basic block and registers it with the graph.
HBasicBlock* HGraph::CreateBasicBlock() {
  HBasicBlock* result = new(zone()) HBasicBlock(this);
  blocks_.Add(result, zone());
  return result;
}


// Finalizes uniqueness of every instruction in every block; must run on
// the main thread with heap allocation disallowed.
void HGraph::FinalizeUniqueness() {
  DisallowHeapAllocation no_gc;
  ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
  for (int i = 0; i < blocks()->length(); ++i) {
    for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
      it.Current()->FinalizeUniqueness();
    }
  }
}


// Registers |shared| in the inlined-functions table (dumping its source the
// first time it is seen) and returns a fresh inline id; a no-op returning 0
// unless --hydrogen-track-positions is on.
int HGraph::TraceInlinedFunction(
    Handle<SharedFunctionInfo> shared,
    HSourcePosition position) {
  if (!FLAG_hydrogen_track_positions) {
    return 0;
  }

  // Reuse the id of a previously seen shared function info, if any.
  int id = 0;
  for (; id < inlined_functions_.length(); id++) {
    if (inlined_functions_[id].shared().is_identical_to(shared)) {
      break;
    }
  }

  if (id == inlined_functions_.length()) {
    inlined_functions_.Add(InlinedFunctionInfo(shared), zone());

    if (!shared->script()->IsUndefined()) {
      Handle<Script> script(Script::cast(shared->script()));
      if (!script->source()->IsUndefined()) {
        CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
        PrintF(tracing_scope.file(),
               "--- FUNCTION SOURCE (%s) id{%d,%d} ---\n",
               shared->DebugName()->ToCString().get(),
               info()->optimization_id(),
               id);

        {
          ConsStringIteratorOp op;
          StringCharacterStream stream(String::cast(script->source()),
                                       &op,
                                       shared->start_position());
          // fun->end_position() points to the last character in the stream. We
          // need to compensate by adding one to calculate the length.
          int source_len =
              shared->end_position() - shared->start_position() + 1;
          for (int i = 0; i < source_len; i++) {
            if (stream.HasMore()) {
              PrintF(tracing_scope.file(), "%c", stream.GetNext());
            }
          }
        }

        PrintF(tracing_scope.file(), "\n--- END ---\n");
      }
    }
  }

  int inline_id = next_inline_id_++;

  // id 0 is the top-level function; only actual inlines are traced here.
  if (inline_id != 0) {
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    PrintF(tracing_scope.file(), "INLINE (%s) id{%d,%d} AS %d AT ",
           shared->DebugName()->ToCString().get(),
           info()->optimization_id(),
           id,
           inline_id);
    position.PrintTo(tracing_scope.file());
    PrintF(tracing_scope.file(), "\n");
  }

  return inline_id;
}


// Translates a tracked source position into an absolute script position by
// adding the start position of the function it was inlined from.
int HGraph::SourcePositionToScriptPosition(HSourcePosition pos) {
  if (!FLAG_hydrogen_track_positions || pos.IsUnknown()) {
    return pos.raw();
  }

  return inlined_functions_[pos.inlining_id()].start_position() +
      pos.position();
}


// Block ordering was implemented with two mutually recursive methods,
// HGraph::Postorder and HGraph::PostorderLoopBlocks.
// The recursion could lead to stack overflow so the algorithm has been
// implemented iteratively.
// At a high level the algorithm looks like this:
//
// Postorder(block, loop_header) : {
//   if (block has already been visited or is of another loop) return;
//   mark block as visited;
//   if (block is a loop header) {
//     VisitLoopMembers(block, loop_header);
//     VisitSuccessorsOfLoopHeader(block);
//   } else {
//     VisitSuccessors(block)
//   }
//   put block in result list;
// }
//
// VisitLoopMembers(block, outer_loop_header) {
//   foreach (block b in block loop members) {
//     VisitSuccessorsOfLoopMember(b, outer_loop_header);
//     if (b is loop header) VisitLoopMembers(b);
//   }
// }
//
// VisitSuccessorsOfLoopMember(block, outer_loop_header) {
//   foreach (block b in block successors) Postorder(b, outer_loop_header)
// }
//
// VisitSuccessorsOfLoopHeader(block) {
//   foreach (block b in block successors) Postorder(b, block)
// }
//
// VisitSuccessors(block, loop_header) {
//   foreach (block b in block successors) Postorder(b, loop_header)
// }
//
// The ordering is started calling Postorder(entry, NULL).
//
// Each instance of PostorderProcessor represents the "stack frame" of the
// recursion, and particularly keeps the state of the loop (iteration) of the
// "Visit..." function it represents.
// To recycle memory we keep all the frames in a double linked list but
// this means that we cannot use constructors to initialize the frames.
//
class PostorderProcessor : public ZoneObject {
 public:
  // Back link (towards the stack bottom).
  PostorderProcessor* parent() {return father_; }
  // Forward link (towards the stack top).
  PostorderProcessor* child() {return child_; }
  HBasicBlock* block() { return block_; }
  HLoopInformation* loop() { return loop_; }
  HBasicBlock* loop_header() { return loop_header_; }

  // Allocates the bottom-most frame and starts the traversal at |block|.
  static PostorderProcessor* CreateEntryProcessor(Zone* zone,
                                                  HBasicBlock* block) {
    PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
    return result->SetupSuccessors(zone, block, NULL);
  }

  // Performs one step of the iterative traversal: either advances the
  // current frame or backtracks towards the stack bottom.
  PostorderProcessor* PerformStep(Zone* zone,
                                  ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* next =
        PerformNonBacktrackingStep(zone, order);
    if (next != NULL) {
      return next;
    } else {
      return Backtrack(zone, order);
    }
  }

 private:
  explicit PostorderProcessor(PostorderProcessor* father)
      : father_(father), child_(NULL), successor_iterator(NULL) { }

  // Each enum value states the cycle whose state is kept by this instance.
  enum LoopKind {
    NONE,
    SUCCESSORS,
    SUCCESSORS_OF_LOOP_HEADER,
    LOOP_MEMBERS,
    SUCCESSORS_OF_LOOP_MEMBER
  };

  // Each "Setup..." method is like a constructor for a cycle state.
  PostorderProcessor* SetupSuccessors(Zone* zone,
                                      HBasicBlock* block,
                                      HBasicBlock* loop_header) {
    if (block == NULL || block->IsOrdered() ||
        block->parent_loop_header() != loop_header) {
      kind_ = NONE;
      block_ = NULL;
      loop_ = NULL;
      loop_header_ = NULL;
      return this;
    } else {
      block_ = block;
      loop_ = NULL;
      block->MarkAsOrdered();

      if (block->IsLoopHeader()) {
        kind_ = SUCCESSORS_OF_LOOP_HEADER;
        loop_header_ = block;
        InitializeSuccessors();
        PostorderProcessor* result = Push(zone);
        return result->SetupLoopMembers(zone, block, block->loop_information(),
                                        loop_header);
      } else {
        ASSERT(block->IsFinished());
        kind_ = SUCCESSORS;
        loop_header_ = loop_header;
        InitializeSuccessors();
        return this;
      }
    }
  }

  PostorderProcessor* SetupLoopMembers(Zone* zone,
                                       HBasicBlock* block,
                                       HLoopInformation* loop,
                                       HBasicBlock* loop_header) {
    kind_ = LOOP_MEMBERS;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeLoopMembers();
    return this;
  }

  PostorderProcessor* SetupSuccessorsOfLoopMember(
      HBasicBlock* block,
      HLoopInformation* loop,
      HBasicBlock* loop_header) {
    kind_ = SUCCESSORS_OF_LOOP_MEMBER;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeSuccessors();
    return this;
  }

  // This method "allocates" a new stack frame.
  PostorderProcessor* Push(Zone* zone) {
    if (child_ == NULL) {
      child_ = new(zone) PostorderProcessor(this);
    }
    return child_;
  }

  // Appends the frame's block to the postorder list, checking (in debug
  // mode) that its successors have already been emitted or are headers.
  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
    ASSERT(block_->end()->FirstSuccessor() == NULL ||
           order->Contains(block_->end()->FirstSuccessor()) ||
           block_->end()->FirstSuccessor()->IsLoopHeader());
    ASSERT(block_->end()->SecondSuccessor() == NULL ||
           order->Contains(block_->end()->SecondSuccessor()) ||
           block_->end()->SecondSuccessor()->IsLoopHeader());
    order->Add(block_, zone);
  }

  // This method is the basic block to walk up the stack.
  PostorderProcessor* Pop(Zone* zone,
                          ZoneList<HBasicBlock*>* order) {
    switch (kind_) {
      case SUCCESSORS:
      case SUCCESSORS_OF_LOOP_HEADER:
        ClosePostorder(order, zone);
        return father_;
      case LOOP_MEMBERS:
        return father_;
      case SUCCESSORS_OF_LOOP_MEMBER:
        if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
          // In this case we need to perform a LOOP_MEMBERS cycle so we
          // initialize it and return this instead of father.
          return SetupLoopMembers(zone, block(),
                                  block()->loop_information(), loop_header_);
        } else {
          return father_;
        }
      case NONE:
        return father_;
    }
    UNREACHABLE();
    return NULL;
  }

  // Walks up the stack.
  PostorderProcessor* Backtrack(Zone* zone,
                                ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* parent = Pop(zone, order);
    while (parent != NULL) {
      PostorderProcessor* next =
          parent->PerformNonBacktrackingStep(zone, order);
      if (next != NULL) {
        return next;
      } else {
        parent = parent->Pop(zone, order);
      }
    }
    return NULL;
  }

  // Advances the cycle held by this frame by one element, pushing a new
  // frame for the next block to visit; returns NULL when the cycle is done.
  PostorderProcessor* PerformNonBacktrackingStep(
      Zone* zone,
      ZoneList<HBasicBlock*>* order) {
    HBasicBlock* next_block;
    switch (kind_) {
      case SUCCESSORS:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_HEADER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, block());
        }
        break;
      case LOOP_MEMBERS:
        next_block = AdvanceLoopMembers();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessorsOfLoopMember(next_block,
                                                     loop_, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_MEMBER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
        }
        break;
      case NONE:
        return NULL;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in successors" cycle.
  void InitializeSuccessors() {
    loop_index = 0;
    loop_length = 0;
    successor_iterator = HSuccessorIterator(block_->end());
  }

  HBasicBlock* AdvanceSuccessors() {
    if (!successor_iterator.Done()) {
      HBasicBlock* result = successor_iterator.Current();
      successor_iterator.Advance();
      return result;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in loop members" cycle.
  void InitializeLoopMembers() {
    loop_index = 0;
    loop_length = loop_->blocks()->length();
  }

  HBasicBlock* AdvanceLoopMembers() {
    if (loop_index < loop_length) {
      HBasicBlock* result = loop_->blocks()->at(loop_index);
      loop_index++;
      return result;
    } else {
      return NULL;
    }
  }

  LoopKind kind_;
  PostorderProcessor* father_;
  PostorderProcessor* child_;
  HLoopInformation* loop_;
  HBasicBlock* block_;
  HBasicBlock* loop_header_;
  int loop_index;
  int loop_length;
  HSuccessorIterator successor_iterator;
};


// Replaces the block list with a reverse-postorder ordering computed by the
// iterative PostorderProcessor machinery above.
void HGraph::OrderBlocks() {
  CompilationPhase phase("H_Block ordering", info());

#ifdef DEBUG
  // Initially the blocks must not be ordered.
  for (int i = 0; i < blocks_.length(); ++i) {
    ASSERT(!blocks_[i]->IsOrdered());
  }
#endif

  PostorderProcessor* postorder =
      PostorderProcessor::CreateEntryProcessor(zone(), blocks_[0]);
  blocks_.Rewind(0);
  while (postorder) {
    postorder = postorder->PerformStep(zone(), &blocks_);
  }

#ifdef DEBUG
  // Now all blocks must be marked as ordered.
  for (int i = 0; i < blocks_.length(); ++i) {
    ASSERT(blocks_[i]->IsOrdered());
  }
#endif

  // Reverse block list and assign block IDs.
3773 for (int i = 0, j = blocks_.length(); --j >= i; ++i) { 3774 HBasicBlock* bi = blocks_[i]; 3775 HBasicBlock* bj = blocks_[j]; 3776 bi->set_block_id(j); 3777 bj->set_block_id(i); 3778 blocks_[i] = bj; 3779 blocks_[j] = bi; 3780 } 3781 } 3782 3783 3784 void HGraph::AssignDominators() { 3785 HPhase phase("H_Assign dominators", this); 3786 for (int i = 0; i < blocks_.length(); ++i) { 3787 HBasicBlock* block = blocks_[i]; 3788 if (block->IsLoopHeader()) { 3789 // Only the first predecessor of a loop header is from outside the loop. 3790 // All others are back edges, and thus cannot dominate the loop header. 3791 block->AssignCommonDominator(block->predecessors()->first()); 3792 block->AssignLoopSuccessorDominators(); 3793 } else { 3794 for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) { 3795 blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j)); 3796 } 3797 } 3798 } 3799 } 3800 3801 3802 bool HGraph::CheckArgumentsPhiUses() { 3803 int block_count = blocks_.length(); 3804 for (int i = 0; i < block_count; ++i) { 3805 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) { 3806 HPhi* phi = blocks_[i]->phis()->at(j); 3807 // We don't support phi uses of arguments for now. 3808 if (phi->CheckFlag(HValue::kIsArguments)) return false; 3809 } 3810 } 3811 return true; 3812 } 3813 3814 3815 bool HGraph::CheckConstPhiUses() { 3816 int block_count = blocks_.length(); 3817 for (int i = 0; i < block_count; ++i) { 3818 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) { 3819 HPhi* phi = blocks_[i]->phis()->at(j); 3820 // Check for the hole value (from an uninitialized const). 
      for (int k = 0; k < phi->OperandCount(); k++) {
        if (phi->OperandAt(k) == GetConstantHole()) return false;
      }
    }
  }
  return true;
}


// Gathers every phi of every block into phi_list_ so later phases can walk
// all phis without re-traversing the graph.
void HGraph::CollectPhis() {
  int block_count = blocks_.length();
  phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
  for (int i = 0; i < block_count; ++i) {
    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
      HPhi* phi = blocks_[i]->phis()->at(j);
      phi_list_->Add(phi, zone());
    }
  }
}


// Implementation of utility class to encapsulate the translation state for
// a (possibly inlined) function.  FunctionStates form a stack (via outer_)
// on the graph builder; the constructor pushes and the destructor pops.
FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
                             CompilationInfo* info,
                             InliningKind inlining_kind,
                             int inlining_id)
    : owner_(owner),
      compilation_info_(info),
      call_context_(NULL),
      inlining_kind_(inlining_kind),
      function_return_(NULL),
      test_context_(NULL),
      entry_(NULL),
      arguments_object_(NULL),
      arguments_elements_(NULL),
      inlining_id_(inlining_id),
      outer_source_position_(HSourcePosition::Unknown()),
      outer_(owner->function_state()) {
  if (outer_ != NULL) {
    // State for an inline function.
    if (owner->ast_context()->IsTest()) {
      // Inlined into a test context: returns branch directly to the outer
      // test's targets, so create dedicated true/false return blocks.
      HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
      HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
      if_true->MarkAsInlineReturnTarget(owner->current_block());
      if_false->MarkAsInlineReturnTarget(owner->current_block());
      TestContext* outer_test_context = TestContext::cast(owner->ast_context());
      Expression* cond = outer_test_context->condition();
      // The AstContext constructor pushed on the context stack.  This newed
      // instance is the reason that AstContext can't be BASE_EMBEDDED.
      test_context_ = new TestContext(owner, cond, if_true, if_false);
    } else {
      // Value or effect context: a single return target suffices.
      function_return_ = owner->graph()->CreateBasicBlock();
      function_return()->MarkAsInlineReturnTarget(owner->current_block());
    }
    // Set this after possibly allocating a new TestContext above.
    call_context_ = owner->ast_context();
  }

  // Push on the state stack.
  owner->set_function_state(this);

  if (FLAG_hydrogen_track_positions) {
    // Remember the caller's position so ~FunctionState can restore it when
    // the inlined body has been fully translated.
    outer_source_position_ = owner->source_position();
    owner->EnterInlinedSource(
        info->shared_info()->start_position(),
        inlining_id);
    owner->SetSourcePosition(info->shared_info()->start_position());
  }
}


// Pops this state off the builder's stack and, when tracking positions,
// restores the outer function's source position.
FunctionState::~FunctionState() {
  delete test_context_;
  owner_->set_function_state(outer_);

  if (FLAG_hydrogen_track_positions) {
    owner_->set_source_position(outer_source_position_);
    owner_->EnterInlinedSource(
        outer_->compilation_info()->shared_info()->start_position(),
        outer_->inlining_id());
  }
}


// Implementation of utility classes to represent an expression's context in
// the AST.  AstContexts also form a stack on the builder; constructing one
// pushes it, destroying it pops it.
AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
    : owner_(owner),
      kind_(kind),
      outer_(owner->ast_context()),
      for_typeof_(false) {
  owner->set_ast_context(this);  // Push.
#ifdef DEBUG
  // Remember the environment length so the subclass destructors can assert
  // the visited expression left the environment in the expected shape.
  ASSERT(owner->environment()->frame_type() == JS_FUNCTION);
  original_length_ = owner->environment()->length();
#endif
}


AstContext::~AstContext() {
  owner_->set_ast_context(outer_);  // Pop.
}


// An effect context discards the expression value, so on a live exit path
// the environment must be exactly as long as when the context was entered.
EffectContext::~EffectContext() {
  ASSERT(owner()->HasStackOverflow() ||
         owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ &&
          owner()->environment()->frame_type() == JS_FUNCTION));
}


// A value context must have pushed exactly one value onto the environment.
ValueContext::~ValueContext() {
  ASSERT(owner()->HasStackOverflow() ||
         owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ + 1 &&
          owner()->environment()->frame_type() == JS_FUNCTION));
}


void EffectContext::ReturnValue(HValue* value) {
  // The value is simply ignored.
}


void ValueContext::ReturnValue(HValue* value) {
  // The value is tracked in the bailout environment, and communicated
  // through the environment as the result of the expression.
  if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
    owner()->Bailout(kBadValueContextForArgumentsValue);
  }
  owner()->Push(value);
}


void TestContext::ReturnValue(HValue* value) {
  // In a test context the value feeds a branch rather than the environment.
  BuildBranch(value);
}


// Adds the instruction for its side effects only; a simulate is inserted
// after any observable side effect so deoptimization has a safe point.
void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->IsControlInstruction());
  owner()->AddInstruction(instr);
  if (instr->HasObservableSideEffects()) {
    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}


// Ends the current block with the control instruction, routing both targets
// through fresh empty blocks into a join.  The empty blocks keep the graph
// in edge-split form; the boolean outcome itself is discarded.
void EffectContext::ReturnControl(HControlInstruction* instr,
                                  BailoutId ast_id) {
  ASSERT(!instr->HasObservableSideEffects());
  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, empty_true);
  instr->SetSuccessorAt(1, empty_false);
  owner()->FinishCurrentBlock(instr);
  HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
  owner()->set_current_block(join);
}


// Resumes building at whichever continuation branch is reachable; if both
// are, they are joined first.
void EffectContext::ReturnContinuation(HIfContinuation* continuation,
                                       BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch);
  if (!continuation->IsTrueReachable()) {
    owner()->set_current_block(false_branch);
  } else if (!continuation->IsFalseReachable()) {
    owner()->set_current_block(true_branch);
  } else {
    HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
    owner()->set_current_block(join);
  }
}


// Adds the instruction and pushes it as the expression's value.  The push
// happens before the simulate so the value is part of the deopt state.
void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->IsControlInstruction());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  owner()->AddInstruction(instr);
  owner()->Push(instr);
  if (instr->HasObservableSideEffects()) {
    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}


// Materializes the control instruction's outcome as a value: each successor
// block pushes the corresponding boolean constant, then the blocks join.
void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->HasObservableSideEffects());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
  HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, materialize_true);
  instr->SetSuccessorAt(1, materialize_false);
  owner()->FinishCurrentBlock(instr);
  owner()->set_current_block(materialize_true);
  owner()->Push(owner()->graph()->GetConstantTrue());
  owner()->set_current_block(materialize_false);
  owner()->Push(owner()->graph()->GetConstantFalse());
  HBasicBlock* join =
      owner()->CreateJoin(materialize_true, materialize_false, ast_id);
  owner()->set_current_block(join);
}

// Same as ValueContext::ReturnControl, but starting from a continuation:
// each reachable branch pushes its boolean constant; a join is only built
// when both branches are reachable.
void ValueContext::ReturnContinuation(HIfContinuation* continuation,
                                      BailoutId ast_id) {
  HBasicBlock* materialize_true = NULL;
  HBasicBlock* materialize_false = NULL;
  continuation->Continue(&materialize_true, &materialize_false);
  if (continuation->IsTrueReachable()) {
    owner()->set_current_block(materialize_true);
    owner()->Push(owner()->graph()->GetConstantTrue());
    owner()->set_current_block(materialize_true);
  }
  if (continuation->IsFalseReachable()) {
    owner()->set_current_block(materialize_false);
    owner()->Push(owner()->graph()->GetConstantFalse());
    owner()->set_current_block(materialize_false);
  }
  if (continuation->TrueAndFalseReachable()) {
    HBasicBlock* join =
        owner()->CreateJoin(materialize_true, materialize_false, ast_id);
    owner()->set_current_block(join);
  }
}


// Adds the instruction and then branches on its value.  If a simulate is
// needed, the value is temporarily pushed so it is captured in the deopt
// state, and popped again before branching.
void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->IsControlInstruction());
  HOptimizedGraphBuilder* builder = owner();
  builder->AddInstruction(instr);
  // We expect a simulate after every expression with side effects, though
  // this one isn't actually needed (and wouldn't work if it were targeted).
  if (instr->HasObservableSideEffects()) {
    builder->Push(instr);
    builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    builder->Pop();
  }
  BuildBranch(instr);
}


// Routes the control instruction's two outcomes to the test's if_true /
// if_false targets, via empty blocks to preserve edge-split form.  Control
// does not fall through, so the current block is cleared.
void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->HasObservableSideEffects());
  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, empty_true);
  instr->SetSuccessorAt(1, empty_false);
  owner()->FinishCurrentBlock(instr);
  owner()->Goto(empty_true, if_true(), owner()->function_state());
  owner()->Goto(empty_false, if_false(), owner()->function_state());
  owner()->set_current_block(NULL);
}


// Routes the continuation's reachable branches to if_true / if_false.
// As above, control does not fall through.
void TestContext::ReturnContinuation(HIfContinuation* continuation,
                                     BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch);
  if (continuation->IsTrueReachable()) {
    owner()->Goto(true_branch, if_true(), owner()->function_state());
  }
  if (continuation->IsFalseReachable()) {
    owner()->Goto(false_branch, if_false(), owner()->function_state());
  }
  owner()->set_current_block(NULL);
}


void TestContext::BuildBranch(HValue* value) {
  // We expect the graph to be in edge-split form: there is no edge that
  // connects a branch node to a join node.  We conservatively ensure that
  // property by always adding an empty block on the outgoing edges of this
  // branch.
  HOptimizedGraphBuilder* builder = owner();
  if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
    builder->Bailout(kArgumentsObjectValueInATestContext);
  }
  // Branch on the value using the type feedback gathered for the condition.
  ToBooleanStub::Types expected(condition()->to_boolean_types());
  ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
}


// HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.
// CHECK_BAILOUT returns early if visiting aborted (stack overflow is used as
// the bailout signal); CHECK_ALIVE additionally returns when control flow
// died (current block became NULL).
#define CHECK_BAILOUT(call)                     \
  do {                                          \
    call;                                       \
    if (HasStackOverflow()) return;             \
  } while (false)


#define CHECK_ALIVE(call)                                       \
  do {                                                          \
    call;                                                       \
    if (HasStackOverflow() || current_block() == NULL) return;  \
  } while (false)


#define CHECK_ALIVE_OR_RETURN(call, value)                            \
  do {                                                                \
    call;                                                             \
    if (HasStackOverflow() || current_block() == NULL) return value;  \
  } while (false)


// Records the bailout reason and raises the stack-overflow flag, which the
// CHECK_* macros above use to unwind the visitation.
void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
  current_info()->set_bailout_reason(reason);
  SetStackOverflow();
}


// Visits the expression for its side effects only.
void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
  EffectContext for_effect(this);
  Visit(expr);
}


// Visits the expression and leaves its value pushed on the environment.
void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
                                           ArgumentsAllowedFlag flag) {
  ValueContext for_value(this, flag);
  Visit(expr);
}


// Like VisitForValue, but flags the context as a typeof operand.
void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
  for_value.set_for_typeof(true);
  Visit(expr);
}


// Visits the expression as a branch condition targeting the given blocks.
void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
                                             HBasicBlock* true_block,
                                             HBasicBlock* false_block) {
  TestContext for_test(this, expr, true_block, false_block);
  Visit(expr);
}


// Visits each expression for value, stopping on bailout or dead control.
void HOptimizedGraphBuilder::VisitExpressions(
    ZoneList<Expression*>* exprs) {
  for (int i = 0; i < exprs->length(); ++i) {
    CHECK_ALIVE(VisitForValue(exprs->at(i)));
  }
}


// Translates the function's AST into a Hydrogen graph.  Returns false on
// bailout: generators, illegal redeclarations, direct eval, or a stack
// overflow (the bailout signal) while visiting the body.
bool HOptimizedGraphBuilder::BuildGraph() {
  if (current_info()->function()->is_generator()) {
    Bailout(kFunctionIsAGenerator);
    return false;
  }
  Scope* scope = current_info()->scope();
  if (scope->HasIllegalRedeclaration()) {
    Bailout(kFunctionWithIllegalRedeclaration);
    return false;
  }
  if (scope->calls_eval()) {
    Bailout(kFunctionCallsEval);
    return false;
  }
  SetUpScope(scope);

  // Add an edge to the body entry.  This is warty: the graph's start
  // environment will be used by the Lithium translation as the initial
  // environment on graph entry, but it has now been mutated by the
  // Hydrogen translation of the instructions in the start block.  This
  // environment uses values which have not been defined yet.  These
  // Hydrogen instructions will then be replayed by the Lithium
  // translation, so they cannot have an environment effect.  The edge to
  // the body's entry block (along with some special logic for the start
  // block in HInstruction::InsertAfter) seals the start block from
  // getting unwanted instructions inserted.
  //
  // TODO(kmillikin): Fix this.  Stop mutating the initial environment.
  // Make the Hydrogen instructions in the initial block into Hydrogen
  // values (but not instructions), present in the initial environment and
  // not replayed by the Lithium translation.
  HEnvironment* initial_env = environment()->CopyWithoutHistory();
  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
  Goto(body_entry);
  body_entry->SetJoinId(BailoutId::FunctionEntry());
  set_current_block(body_entry);

  // Handle implicit declaration of the function name in named function
  // expressions before other declarations.
  if (scope->is_function_scope() && scope->function() != NULL) {
    VisitVariableDeclaration(scope->function());
  }
  VisitDeclarations(scope->declarations());
  Add<HSimulate>(BailoutId::Declarations());

  Add<HStackCheck>(HStackCheck::kFunctionEntry);

  VisitStatements(current_info()->function()->body());
  if (HasStackOverflow()) return false;

  // If the body can fall off the end, return undefined implicitly.
  if (current_block() != NULL) {
    Add<HReturn>(graph()->GetConstantUndefined());
    set_current_block(NULL);
  }

  // If the checksum of the number of type info changes is the same as the
  // last time this function was compiled, then this recompile is likely not
  // due to missing/inadequate type feedback, but rather too aggressive
  // optimization. Disable optimistic LICM in that case.
  Handle<Code> unoptimized_code(current_info()->shared_info()->code());
  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
  Handle<TypeFeedbackInfo> type_info(
      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
  int checksum = type_info->own_type_change_checksum();
  int composite_checksum = graph()->update_type_change_checksum(checksum);
  graph()->set_use_optimistic_licm(
      !type_info->matches_inlined_type_change_checksum(composite_checksum));
  type_info->set_inlined_type_change_checksum(composite_checksum);

  // Perform any necessary OSR-specific cleanups or changes to the graph.
  osr()->FinishGraph();

  return true;
}


// Runs the Hydrogen optimization pipeline over the completed graph.  Returns
// false (setting *bailout_reason) when unsupported phi uses are detected.
bool HGraph::Optimize(BailoutReason* bailout_reason) {
  OrderBlocks();
  AssignDominators();

  // We need to create a HConstant "zero" now so that GVN will fold every
  // zero-valued constant in the graph together.
  // The constant is needed to make idef-based bounds check work: the pass
  // evaluates relations with "zero" and that zero cannot be created after GVN.
  GetConstant0();

#ifdef DEBUG
  // Do a full verify after building the graph and computing dominators.
  Verify(true);
#endif

  if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
    Run<HEnvironmentLivenessAnalysisPhase>();
  }

  if (!CheckConstPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfConstVariable;
    return false;
  }
  Run<HRedundantPhiEliminationPhase>();
  if (!CheckArgumentsPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfArguments;
    return false;
  }

  // Find and mark unreachable code to simplify optimizations, especially gvn,
  // where unreachable code could unnecessarily defeat LICM.
  Run<HMarkUnreachableBlocksPhase>();

  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
  if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();

  if (FLAG_load_elimination) Run<HLoadEliminationPhase>();

  // The phi set is final now; cache all phis for the following phases.
  CollectPhis();

  if (has_osr()) osr()->FinishOsrValues();

  Run<HInferRepresentationPhase>();

  // Remove HSimulate instructions that have turned out not to be needed
  // after all by folding them into the following HSimulate.
  // This must happen after inferring representations.
  Run<HMergeRemovableSimulatesPhase>();

  Run<HMarkDeoptimizeOnUndefinedPhase>();
  Run<HRepresentationChangesPhase>();

  Run<HInferTypesPhase>();

  // Must be performed before canonicalization to ensure that Canonicalize
  // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
  // zero.
  if (FLAG_opt_safe_uint32_operations) Run<HUint32AnalysisPhase>();

  if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();

  if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();

  if (FLAG_check_elimination) Run<HCheckEliminationPhase>();

  if (FLAG_store_elimination) Run<HStoreEliminationPhase>();

  Run<HRangeAnalysisPhase>();

  Run<HComputeChangeUndefinedToNaN>();

  // Eliminate redundant stack checks on backwards branches.
  Run<HStackCheckEliminationPhase>();

  if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
  if (FLAG_array_bounds_checks_hoisting) Run<HBoundsCheckHoistingPhase>();
  if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();

  RestoreActualValues();

  // Find unreachable code a second time, GVN and other optimizations may have
  // made blocks unreachable that were previously reachable.
  Run<HMarkUnreachableBlocksPhase>();

  return true;
}


// Rewrites uses of informative or dead definitions to refer to the
// underlying actual values, so subsequent consumers of the graph see the
// real operands rather than redefinitions.
void HGraph::RestoreActualValues() {
  HPhase phase("H_Restore actual values", this);

  for (int block_index = 0; block_index < blocks()->length(); block_index++) {
    HBasicBlock* block = blocks()->at(block_index);

#ifdef DEBUG
    // Phis are expected to already be their own actual values.
    for (int i = 0; i < block->phis()->length(); i++) {
      HPhi* phi = block->phis()->at(i);
      ASSERT(phi->ActualValue() == phi);
    }
#endif

    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instruction = it.Current();
      if (instruction->ActualValue() == instruction) continue;
      if (instruction->CheckFlag(HValue::kIsDead)) {
        // The instruction was marked as deleted but left in the graph
        // as a control flow dependency point for subsequent
        // instructions.
        instruction->DeleteAndReplaceWith(instruction->ActualValue());
      } else {
        ASSERT(instruction->IsInformativeDefinition());
        if (instruction->IsPurelyInformativeDefinition()) {
          // Purely informative: remove it and redirect to its operand.
          instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
        } else {
          // Keep the instruction but make uses see the actual value.
          instruction->ReplaceAllUsesWith(instruction->ActualValue());
        }
      }
    }
  }
}


// Pops |count| values off the environment and emits a single HPushArguments
// instruction.  RemoveLast re-reverses the popped values, restoring the
// original argument order.
void HOptimizedGraphBuilder::PushArgumentsFromEnvironment(int count) {
  ZoneList<HValue*> arguments(count, zone());
  for (int i = 0; i < count; ++i) {
    arguments.Add(Pop(), zone());
  }

  HPushArguments* push_args = New<HPushArguments>();
  while (!arguments.is_empty()) {
    push_args->AddInput(arguments.RemoveLast());
  }
  AddInstruction(push_args);
}


// Moves the call's arguments from the environment into explicit push
// instructions, then returns the (not yet added) call instruction.
template <class Instruction>
HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
  PushArgumentsFromEnvironment(call->argument_count());
  return call;
}


// Initializes the graph's entry environment: binds the context, creates and
// binds the incoming parameters (recording them in an HArgumentsObject),
// and initializes all remaining specials and locals to undefined.
void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
  // First special is HContext.
  HInstruction* context = Add<HContext>();
  environment()->BindContext(context);

  // Create an arguments object containing the initial parameters.  Set the
  // initial values of parameters including "this" having parameter index 0.
  ASSERT_EQ(scope->num_parameters() + 1, environment()->parameter_count());
  HArgumentsObject* arguments_object =
      New<HArgumentsObject>(environment()->parameter_count());
  for (int i = 0; i < environment()->parameter_count(); ++i) {
    HInstruction* parameter = Add<HParameter>(i);
    arguments_object->AddArgument(parameter, zone());
    environment()->Bind(i, parameter);
  }
  AddInstruction(arguments_object);
  graph()->SetArgumentsObject(arguments_object);

  HConstant* undefined_constant = graph()->GetConstantUndefined();
  // Initialize specials and locals to undefined.
  for (int i = environment()->parameter_count() + 1;
       i < environment()->length();
       ++i) {
    environment()->Bind(i, undefined_constant);
  }

  // Handle the arguments and arguments shadow variables specially (they do
  // not have declarations).
  if (scope->arguments() != NULL) {
    if (!scope->arguments()->IsStackAllocated()) {
      return Bailout(kContextAllocatedArguments);
    }

    environment()->Bind(scope->arguments(),
                        graph()->GetArgumentsObject());
  }
}


// Visits statements for effect, stopping after a jump statement since the
// statements that follow it are unreachable.
void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
  for (int i = 0; i < statements->length(); i++) {
    Statement* stmt = statements->at(i);
    CHECK_ALIVE(Visit(stmt));
    if (stmt->IsJump()) break;
  }
}


// Translates a block statement.  A block with its own scope additionally
// allocates a block context, visits the scope's declarations, and restores
// the outer context on exit.
void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());

  Scope* outer_scope = scope();
  Scope* scope = stmt->scope();
  BreakAndContinueInfo break_info(stmt, outer_scope);

  { BreakAndContinueScope push(&break_info, this);
    if (scope != NULL) {
      // Load the function object.
      Scope* declaration_scope = scope->DeclarationScope();
      HInstruction* function;
      HValue* outer_context = environment()->context();
      if (declaration_scope->is_global_scope() ||
          declaration_scope->is_eval_scope()) {
        function = new(zone()) HLoadContextSlot(
            outer_context, Context::CLOSURE_INDEX, HLoadContextSlot::kNoCheck);
      } else {
        function = New<HThisFunction>();
      }
      AddInstruction(function);
      // Allocate a block context and store it to the stack frame.
      HInstruction* inner_context = Add<HAllocateBlockContext>(
          outer_context, function, scope->GetScopeInfo());
      HInstruction* instr = Add<HStoreFrameContext>(inner_context);
      if (instr->HasObservableSideEffects()) {
        AddSimulate(stmt->EntryId(), REMOVABLE_SIMULATE);
      }
      set_scope(scope);
      environment()->BindContext(inner_context);
      VisitDeclarations(scope->declarations());
      AddSimulate(stmt->DeclsId(), REMOVABLE_SIMULATE);
    }
    CHECK_BAILOUT(VisitStatements(stmt->statements()));
  }
  set_scope(outer_scope);
  if (scope != NULL && current_block() != NULL) {
    // Leaving the scoped block: pop back to the previous context.
    HValue* inner_context = environment()->context();
    HValue* outer_context = Add<HLoadNamedField>(
        inner_context, static_cast<HValue*>(NULL),
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));

    HInstruction* instr = Add<HStoreFrameContext>(outer_context);
    if (instr->HasObservableSideEffects()) {
      AddSimulate(stmt->ExitId(), REMOVABLE_SIMULATE);
    }
    environment()->BindContext(outer_context);
  }
  // If the block was the target of a break, join the break edges here.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block != NULL) {
    if (current_block() != NULL) Goto(break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}


void HOptimizedGraphBuilder::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  VisitForEffect(stmt->expression());
}


void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
}


// Translates an if statement.  Statically true/false conditions are visited
// without branching; otherwise an edge-split diamond is built and joined.
void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  if (stmt->condition()->ToBooleanIsTrue()) {
    Add<HSimulate>(stmt->ThenId());
    Visit(stmt->then_statement());
  } else if (stmt->condition()->ToBooleanIsFalse()) {
    Add<HSimulate>(stmt->ElseId());
    Visit(stmt->else_statement());
  } else {
    HBasicBlock* cond_true = graph()->CreateBasicBlock();
    HBasicBlock* cond_false = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));

    if (cond_true->HasPredecessor()) {
      cond_true->SetJoinId(stmt->ThenId());
      set_current_block(cond_true);
      CHECK_BAILOUT(Visit(stmt->then_statement()));
      cond_true = current_block();
    } else {
      cond_true = NULL;
    }

    if (cond_false->HasPredecessor()) {
      cond_false->SetJoinId(stmt->ElseId());
      set_current_block(cond_false);
      CHECK_BAILOUT(Visit(stmt->else_statement()));
      cond_false = current_block();
    } else {
      cond_false = NULL;
    }

    HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
    set_current_block(join);
  }
}


// Walks the break/continue scope chain to find the entry targeting |stmt|,
// returning its break or continue block (created on demand).  On the way it
// accumulates in *drop_extra how many environment values must be dropped and
// reports in *scope which scope the jump lands in.
HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
    BreakableStatement* stmt,
    BreakType type,
    Scope** scope,
    int* drop_extra) {
  *drop_extra = 0;
  BreakAndContinueScope* current = this;
  while (current != NULL && current->info()->target() != stmt) {
    *drop_extra += current->info()->drop_extra();
    current = current->next();
  }
  ASSERT(current != NULL);  // Always found (unless stack is malformed).
  *scope = current->info()->scope();

  if (type == BREAK) {
    // A break also leaves the target's own construct, so drop its values too.
    *drop_extra += current->info()->drop_extra();
  }

  HBasicBlock* block = NULL;
  switch (type) {
    case BREAK:
      block = current->info()->break_block();
      if (block == NULL) {
        block = current->owner()->graph()->CreateBasicBlock();
        current->info()->set_break_block(block);
      }
      break;

    case CONTINUE:
      block = current->info()->continue_block();
      if (block == NULL) {
        block = current->owner()->graph()->CreateBasicBlock();
        current->info()->set_continue_block(block);
      }
      break;
  }

  return block;
}


// Translates continue: unwinds block contexts for every scope being exited,
// drops extra environment values, then jumps to the continue block.
void HOptimizedGraphBuilder::VisitContinueStatement(
    ContinueStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Scope* outer_scope = NULL;
  Scope* inner_scope = scope();
  int drop_extra = 0;
  HBasicBlock* continue_block = break_scope()->Get(
      stmt->target(), BreakAndContinueScope::CONTINUE,
      &outer_scope, &drop_extra);
  HValue* context = environment()->context();
  Drop(drop_extra);
  int context_pop_count = inner_scope->ContextChainLength(outer_scope);
  if (context_pop_count > 0) {
    while (context_pop_count-- > 0) {
      HInstruction* context_instruction = Add<HLoadNamedField>(
          context, static_cast<HValue*>(NULL),
          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
      context = context_instruction;
    }
    HInstruction* instr = Add<HStoreFrameContext>(context);
    if (instr->HasObservableSideEffects()) {
      AddSimulate(stmt->target()->EntryId(), REMOVABLE_SIMULATE);
    }
    environment()->BindContext(context);
  }

  Goto(continue_block);
  set_current_block(NULL);
}


// Translates break: same context unwinding as continue, then jumps to the
// break block.
void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Scope* outer_scope = NULL;
  Scope* inner_scope = scope();
  int drop_extra = 0;
  HBasicBlock* break_block = break_scope()->Get(
      stmt->target(), BreakAndContinueScope::BREAK,
      &outer_scope, &drop_extra);
  HValue* context = environment()->context();
  Drop(drop_extra);
  int context_pop_count = inner_scope->ContextChainLength(outer_scope);
  if (context_pop_count > 0) {
    while (context_pop_count-- > 0) {
      HInstruction* context_instruction = Add<HLoadNamedField>(
          context, static_cast<HValue*>(NULL),
          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
      context = context_instruction;
    }
    HInstruction* instr = Add<HStoreFrameContext>(context);
    if (instr->HasObservableSideEffects()) {
      AddSimulate(stmt->target()->ExitId(), REMOVABLE_SIMULATE);
    }
    environment()->BindContext(context);
  }
  Goto(break_block);
  set_current_block(NULL);
}


// Translates return.  A non-inlined return emits HReturn; an inlined return
// routes to the appropriate inline-return target, with special handling for
// construct-call and setter inlining.
void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  FunctionState* state = function_state();
  AstContext* context = call_context();
  if (context == NULL) {
    // Not an inlined return, so an actual one.
    CHECK_ALIVE(VisitForValue(stmt->expression()));
    HValue* result = environment()->Pop();
    Add<HReturn>(result);
  } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
    // Return from an inlined construct call. In a test context the return value
    // will always evaluate to true, in a value context the return value needs
    // to be a JSObject.
    if (context->IsTest()) {
      TestContext* test = TestContext::cast(context);
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      Goto(test->if_true(), state);
    } else if (context->IsEffect()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      Goto(function_return(), state);
    } else {
      ASSERT(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      // Per construct semantics: return the value if it is a spec object,
      // otherwise return the receiver.
      HValue* return_value = Pop();
      HValue* receiver = environment()->arguments_environment()->Lookup(0);
      HHasInstanceTypeAndBranch* typecheck =
          New<HHasInstanceTypeAndBranch>(return_value,
                                         FIRST_SPEC_OBJECT_TYPE,
                                         LAST_SPEC_OBJECT_TYPE);
      HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
      HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
      typecheck->SetSuccessorAt(0, if_spec_object);
      typecheck->SetSuccessorAt(1, not_spec_object);
      FinishCurrentBlock(typecheck);
      AddLeaveInlined(if_spec_object, return_value, state);
      AddLeaveInlined(not_spec_object, receiver, state);
    }
  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
    // Return from an inlined setter call. The returned value is never used, the
    // value of an assignment is always the value of the RHS of the assignment.
    CHECK_ALIVE(VisitForEffect(stmt->expression()));
    if (context->IsTest()) {
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      context->ReturnValue(rhs);
    } else if (context->IsEffect()) {
      Goto(function_return(), state);
    } else {
      ASSERT(context->IsValue());
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      AddLeaveInlined(rhs, state);
    }
  } else {
    // Return from a normal inlined function. Visit the subexpression in the
    // expression context of the call.
    if (context->IsTest()) {
      TestContext* test = TestContext::cast(context);
      VisitForControl(stmt->expression(), test->if_true(), test->if_false());
    } else if (context->IsEffect()) {
      // Visit in value context and ignore the result. This is needed to keep
      // environment in sync with full-codegen since some visitors (e.g.
      // VisitCountOperation) use the operand stack differently depending on
      // context.
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      Pop();
      Goto(function_return(), state);
    } else {
      ASSERT(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      AddLeaveInlined(Pop(), state);
    }
  }
  set_current_block(NULL);
}


// With statements are not optimized; always bail out.
void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kWithStatement);
}


void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());

  // We only optimize switch statements with a bounded number of clauses.
  const int kCaseClauseLimit = 128;
  ZoneList<CaseClause*>* clauses = stmt->cases();
  int clause_count = clauses->length();
  ZoneList<HBasicBlock*> body_blocks(clause_count, zone());
  if (clause_count > kCaseClauseLimit) {
    return Bailout(kSwitchStatementTooManyClauses);
  }

  CHECK_ALIVE(VisitForValue(stmt->tag()));
  Add<HSimulate>(stmt->EntryId());
  HValue* tag_value = Top();
  Type* tag_type = stmt->tag()->bounds().lower;

  // 1. Build all the tests, with dangling true branches
  BailoutId default_id = BailoutId::None();
  for (int i = 0; i < clause_count; ++i) {
    CaseClause* clause = clauses->at(i);
    if (clause->is_default()) {
      // Default clauses get no test; remember where the first one starts.
      body_blocks.Add(NULL, zone());
      if (default_id.IsNone()) default_id = clause->EntryId();
      continue;
    }

    // Generate a compare and branch.
    CHECK_ALIVE(VisitForValue(clause->label()));
    HValue* label_value = Pop();

    Type* label_type = clause->label()->bounds().lower;
    Type* combined_type = clause->compare_type();
    HControlInstruction* compare = BuildCompareInstruction(
        Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
        combined_type,
        ScriptPositionToSourcePosition(stmt->tag()->position()),
        ScriptPositionToSourcePosition(clause->label()->position()),
        PUSH_BEFORE_SIMULATE, clause->id());

    HBasicBlock* next_test_block = graph()->CreateBasicBlock();
    HBasicBlock* body_block = graph()->CreateBasicBlock();
    body_blocks.Add(body_block, zone());
    compare->SetSuccessorAt(0, body_block);
    compare->SetSuccessorAt(1, next_test_block);
    FinishCurrentBlock(compare);

    set_current_block(body_block);
    Drop(1);  // tag_value

    set_current_block(next_test_block);
  }

  // Save the current block to use for the default or to join with the
  // exit.
  HBasicBlock* last_block = current_block();
  Drop(1);  // tag_value

  // 2. Loop over the clauses and the linked list of tests in lockstep,
  // translating the clause bodies.
  HBasicBlock* fall_through_block = NULL;

  BreakAndContinueInfo break_info(stmt, scope());
  { BreakAndContinueScope push(&break_info, this);
    for (int i = 0; i < clause_count; ++i) {
      CaseClause* clause = clauses->at(i);

      // Identify the block where normal (non-fall-through) control flow
      // goes to.
4820 HBasicBlock* normal_block = NULL; 4821 if (clause->is_default()) { 4822 if (last_block == NULL) continue; 4823 normal_block = last_block; 4824 last_block = NULL; // Cleared to indicate we've handled it. 4825 } else { 4826 normal_block = body_blocks[i]; 4827 } 4828 4829 if (fall_through_block == NULL) { 4830 set_current_block(normal_block); 4831 } else { 4832 HBasicBlock* join = CreateJoin(fall_through_block, 4833 normal_block, 4834 clause->EntryId()); 4835 set_current_block(join); 4836 } 4837 4838 CHECK_BAILOUT(VisitStatements(clause->statements())); 4839 fall_through_block = current_block(); 4840 } 4841 } 4842 4843 // Create an up-to-3-way join. Use the break block if it exists since 4844 // it's already a join block. 4845 HBasicBlock* break_block = break_info.break_block(); 4846 if (break_block == NULL) { 4847 set_current_block(CreateJoin(fall_through_block, 4848 last_block, 4849 stmt->ExitId())); 4850 } else { 4851 if (fall_through_block != NULL) Goto(fall_through_block, break_block); 4852 if (last_block != NULL) Goto(last_block, break_block); 4853 break_block->SetJoinId(stmt->ExitId()); 4854 set_current_block(break_block); 4855 } 4856 } 4857 4858 4859 void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt, 4860 HBasicBlock* loop_entry) { 4861 Add<HSimulate>(stmt->StackCheckId()); 4862 HStackCheck* stack_check = 4863 HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch)); 4864 ASSERT(loop_entry->IsLoopHeader()); 4865 loop_entry->loop_information()->set_stack_check(stack_check); 4866 CHECK_BAILOUT(Visit(stmt->body())); 4867 } 4868 4869 4870 void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) { 4871 ASSERT(!HasStackOverflow()); 4872 ASSERT(current_block() != NULL); 4873 ASSERT(current_block()->HasPredecessor()); 4874 ASSERT(current_block() != NULL); 4875 HBasicBlock* loop_entry = BuildLoopEntry(stmt); 4876 4877 BreakAndContinueInfo break_info(stmt, scope()); 4878 { 4879 BreakAndContinueScope push(&break_info, 
                               this);
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
  }
  // Merge normal body exit with any 'continue' edges.
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());
  HBasicBlock* loop_successor = NULL;
  if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
    set_current_block(body_exit);
    loop_successor = graph()->CreateBasicBlock();
    if (stmt->cond()->ToBooleanIsFalse()) {
      // Condition statically false: the loop runs exactly once, so the
      // back-edge stack check is dead and can be eliminated.
      loop_entry->loop_information()->stack_check()->Eliminate();
      Goto(loop_successor);
      body_exit = NULL;
    } else {
      // The block for a true condition, the actual predecessor block of the
      // back edge.
      body_exit = graph()->CreateBasicBlock();
      CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
    }
    if (body_exit != NULL && body_exit->HasPredecessor()) {
      body_exit->SetJoinId(stmt->BackEdgeId());
    } else {
      body_exit = NULL;
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;
    }
  }
  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}


// Translates a while loop: condition first (unless statically true), then
// the body, then the loop is closed via CreateLoop.
void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // NOTE(review): duplicate of the assert two lines above; harmless.
  ASSERT(current_block() != NULL);
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  // If the condition is constant true, do not generate a branch.
  HBasicBlock* loop_successor = NULL;
  if (!stmt->cond()->ToBooleanIsTrue()) {
    HBasicBlock* body_entry = graph()->CreateBasicBlock();
    loop_successor = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
    if (body_entry->HasPredecessor()) {
      body_entry->SetJoinId(stmt->BodyId());
      set_current_block(body_entry);
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;
    }
  }

  BreakAndContinueInfo break_info(stmt, scope());
  // current_block() may be NULL here if the condition never reaches the body.
  if (current_block() != NULL) {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
  }
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());
  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}


// Translates a for loop: init outside the loop, optional condition at the
// top, body, then the 'next' expression on the back-edge path.
void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  if (stmt->init() != NULL) {
    CHECK_ALIVE(Visit(stmt->init()));
  }
  ASSERT(current_block() != NULL);
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  HBasicBlock* loop_successor = NULL;
  if (stmt->cond() != NULL) {
    HBasicBlock* body_entry = graph()->CreateBasicBlock();
    loop_successor = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
    if (body_entry->HasPredecessor()) {
      body_entry->SetJoinId(stmt->BodyId());
      set_current_block(body_entry);
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;
    }
  }

  BreakAndContinueInfo break_info(stmt, scope());
  if (current_block() != NULL) {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
  }
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  if (stmt->next() != NULL && body_exit != NULL) {
    // The increment/next expression runs on the back-edge path only.
    set_current_block(body_exit);
    CHECK_BAILOUT(Visit(stmt->next()));
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}


// Translates a for-in loop over the fast (enum-cache) path only; anything
// else bails out to full codegen. Maintains five loop-carried values on the
// expression stack: map, enum cache array, enum length, index (pushed
// below), plus the enumerable underneath them.
void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());

  if (!FLAG_optimize_for_in) {
    return Bailout(kForInStatementOptimizationIsDisabled);
  }

  if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) {
    return Bailout(kForInStatementIsNotFastCase);
  }

  // Only a stack-local loop variable is supported.
  if (!stmt->each()->IsVariableProxy() ||
      !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
    return Bailout(kForInStatementWithNonLocalEachVariable);
  }

  Variable* each_var = stmt->each()->AsVariableProxy()->var();

  CHECK_ALIVE(VisitForValue(stmt->enumerable()));
  HValue* enumerable = Top();  // Leave enumerable at the top.

  HInstruction* map = Add<HForInPrepareMap>(enumerable);
  Add<HSimulate>(stmt->PrepareId());

  HInstruction* array = Add<HForInCacheArray>(
      enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);

  HInstruction* enum_length = Add<HMapEnumLength>(map);

  HInstruction* start_index = Add<HConstant>(0);

  // Loop state on the expression stack, top to bottom after these pushes:
  // index(0), enum_length(1), array(2), map(3), enumerable(4).
  Push(map);
  Push(array);
  Push(enum_length);
  Push(start_index);

  HInstruction* index_cache = Add<HForInCacheArray>(
      enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
  HForInCacheArray::cast(array)->set_index_cache(
      HForInCacheArray::cast(index_cache));

  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  HValue* index = environment()->ExpressionStackAt(0);
  HValue* limit = environment()->ExpressionStackAt(1);

  // Check that we still have more keys.
  HCompareNumericAndBranch* compare_index =
      New<HCompareNumericAndBranch>(index, limit, Token::LT);
  compare_index->set_observed_input_representation(
      Representation::Smi(), Representation::Smi());

  HBasicBlock* loop_body = graph()->CreateBasicBlock();
  HBasicBlock* loop_successor = graph()->CreateBasicBlock();

  compare_index->SetSuccessorAt(0, loop_body);
  compare_index->SetSuccessorAt(1, loop_successor);
  FinishCurrentBlock(compare_index);

  set_current_block(loop_successor);
  Drop(5);  // Unwind index, length, array, map, and the enumerable.

  set_current_block(loop_body);

  HValue* key = Add<HLoadKeyed>(
      environment()->ExpressionStackAt(2),  // Enum cache.
      environment()->ExpressionStackAt(0),  // Iteration index.
      environment()->ExpressionStackAt(0),
      FAST_ELEMENTS);

  // Check if the expected map still matches that of the enumerable.
  // If not just deoptimize.
  Add<HCheckMapValue>(environment()->ExpressionStackAt(4),
                      environment()->ExpressionStackAt(3));

  Bind(each_var, key);

  // Break must unwind the five loop-state stack slots pushed above.
  BreakAndContinueInfo break_info(stmt, scope(), 5);
  {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
  }

  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  if (body_exit != NULL) {
    set_current_block(body_exit);

    // Increment the iteration index on the back-edge path.
    HValue* current_index = Pop();
    Push(AddUncasted<HAdd>(current_index, graph()->GetConstant1()));
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());

  set_current_block(loop_exit);
}


// for-of is not supported by Crankshaft; fall back to full codegen.
void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kForOfStatement);
}


// try/catch is not supported by Crankshaft; fall back to full codegen.
void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kTryCatchStatement);
}


// try/finally is not supported by Crankshaft; fall back to full codegen.
void HOptimizedGraphBuilder::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kTryFinallyStatement);
}


// 'debugger' is not supported by Crankshaft; fall back to full codegen.
void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kDebuggerStatement);
}


// Case clauses are handled inline by VisitSwitchStatement and must never be
// dispatched to directly.
void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}
// Materializes a function literal as an HFunctionLiteral, compiling the
// shared function info on demand if it has not been created yet.
void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Handle<SharedFunctionInfo> shared_info = expr->shared_info();
  if (shared_info.is_null()) {
    shared_info = Compiler::BuildFunctionInfo(expr, current_info()->script());
  }
  // We also have a stack overflow if the recursive compilation did.
  if (HasStackOverflow()) return;
  HFunctionLiteral* instr =
      New<HFunctionLiteral>(shared_info, expr->pretenure());
  return ast_context()->ReturnInstruction(instr, expr->id());
}


// Native function literals are not supported; fall back to full codegen.
void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kNativeFunctionLiteral);
}


// Translates a ternary (?:) expression: branch on the condition, visit each
// arm in the surrounding AST context, then join unless in a test context
// (where each arm already jumped to its target).
void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  HBasicBlock* cond_true = graph()->CreateBasicBlock();
  HBasicBlock* cond_false = graph()->CreateBasicBlock();
  CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));

  // Visit the true and false subexpressions in the same AST context as the
  // whole expression.
  if (cond_true->HasPredecessor()) {
    cond_true->SetJoinId(expr->ThenId());
    set_current_block(cond_true);
    CHECK_BAILOUT(Visit(expr->then_expression()));
    cond_true = current_block();
  } else {
    // Arm is unreachable (condition statically decided).
    cond_true = NULL;
  }

  if (cond_false->HasPredecessor()) {
    cond_false->SetJoinId(expr->ElseId());
    set_current_block(cond_false);
    CHECK_BAILOUT(Visit(expr->else_expression()));
    cond_false = current_block();
  } else {
    cond_false = NULL;
  }

  if (!ast_context()->IsTest()) {
    HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
    set_current_block(join);
    if (join != NULL && !ast_context()->IsEffect()) {
      return ast_context()->ReturnValue(Pop());
    }
  }
}


// Decides whether a global variable access can go through a property cell
// (kUseCell) or must use the generic IC path (kUseGeneric). Stores to
// read-only properties and properties not held directly on the global
// object force the generic path.
HOptimizedGraphBuilder::GlobalPropertyAccess
    HOptimizedGraphBuilder::LookupGlobalProperty(
        Variable* var, LookupResult* lookup, PropertyAccessType access_type) {
  if (var->is_this() || !current_info()->has_global_object()) {
    return kUseGeneric;
  }
  Handle<GlobalObject> global(current_info()->global_object());
  global->Lookup(var->name(), lookup);
  if (!lookup->IsNormal() ||
      (access_type == STORE && lookup->IsReadOnly()) ||
      lookup->holder() != *global) {
    return kUseGeneric;
  }

  return kUseCell;
}


// Emits loads that walk up the context chain from the current context to
// the context of the scope owning |var|, and returns that context value.
HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
  ASSERT(var->IsContextSlot());
  HValue* context = environment()->context();
  int length = scope()->ContextChainLength(var->scope());
  while (length-- > 0) {
    context = Add<HLoadNamedField>(
        context, static_cast<HValue*>(NULL),
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
  }
  return context;
}


// Translates a variable reference according to where the variable lives:
// global (cell, constant, or generic load), stack local/parameter, context
// slot (with hole checks for lexical bindings), or dynamic lookup (bailout).
void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
  if (expr->is_this()) {
    current_info()->set_this_has_uses(true);
  }

  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Variable* variable = expr->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      if (IsLexicalVariableMode(variable->mode())) {
        // TODO(rossberg): should this be an ASSERT?
        return Bailout(kReferenceToGlobalLexicalVariable);
      }
      // Handle known global constants like 'undefined' specially to avoid a
      // load from a global cell for them.
      Handle<Object> constant_value =
          isolate()->factory()->GlobalConstantFor(variable->name());
      if (!constant_value.is_null()) {
        HConstant* instr = New<HConstant>(constant_value);
        return ast_context()->ReturnInstruction(instr, expr->id());
      }

      LookupResult lookup(isolate());
      GlobalPropertyAccess type = LookupGlobalProperty(variable, &lookup, LOAD);

      if (type == kUseCell &&
          current_info()->global_object()->IsAccessCheckNeeded()) {
        // Access-checked globals cannot use the direct cell path.
        type = kUseGeneric;
      }

      if (type == kUseCell) {
        Handle<GlobalObject> global(current_info()->global_object());
        Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
        if (cell->type()->IsConstant()) {
          // Embed the constant and register a dependency so this code is
          // deoptimized if the cell's value changes.
          PropertyCell::AddDependentCompilationInfo(cell, top_info());
          Handle<Object> constant_object = cell->type()->AsConstant()->Value();
          if (constant_object->IsConsString()) {
            constant_object =
                String::Flatten(Handle<String>::cast(constant_object));
          }
          HConstant* constant = New<HConstant>(constant_object);
          return ast_context()->ReturnInstruction(constant, expr->id());
        } else {
          HLoadGlobalCell* instr =
              New<HLoadGlobalCell>(cell, lookup.GetPropertyDetails());
          return ast_context()->ReturnInstruction(instr, expr->id());
        }
      } else {
        HValue* global_object = Add<HLoadNamedField>(
            context(), static_cast<HValue*>(NULL),
            HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
        HLoadGlobalGeneric* instr =
            New<HLoadGlobalGeneric>(global_object,
                                    variable->name(),
                                    ast_context()->is_for_typeof());
        return ast_context()->ReturnInstruction(instr, expr->id());
      }
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      HValue* value = LookupAndMakeLive(variable);
      if (value == graph()->GetConstantHole()) {
        // Hole means a lexical binding read before initialization (TDZ).
        ASSERT(IsDeclaredVariableMode(variable->mode()) &&
               variable->mode() != VAR);
        return Bailout(kReferenceToUninitializedVariable);
      }
      return ast_context()->ReturnValue(value);
    }

    case Variable::CONTEXT: {
      HValue* context = BuildContextChainWalk(variable);
      HLoadContextSlot::Mode mode;
      switch (variable->mode()) {
        case LET:
        case CONST:
          // Lexical bindings deoptimize on the hole.
          mode = HLoadContextSlot::kCheckDeoptimize;
          break;
        case CONST_LEGACY:
          // Legacy const yields undefined for the hole.
          mode = HLoadContextSlot::kCheckReturnUndefined;
          break;
        default:
          mode = HLoadContextSlot::kNoCheck;
          break;
      }
      HLoadContextSlot* instr =
          new(zone()) HLoadContextSlot(context, variable->index(), mode);
      return ast_context()->ReturnInstruction(instr, expr->id());
    }

    case Variable::LOOKUP:
      return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
  }
}


// A literal simply becomes an embedded constant.
void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  HConstant* instr = New<HConstant>(expr->value());
  return ast_context()->ReturnInstruction(instr, expr->id());
}


// Materializes a regexp literal from the closure's literals array.
void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
  Handle<FixedArray> literals(closure->literals());
  HRegExpLiteral* instr = New<HRegExpLiteral>(literals,
                                              expr->pattern(),
                                              expr->flags(),
                                              expr->literal_index());
  return ast_context()->ReturnInstruction(instr, expr->id());
}


// Whether a property access on a value of the given type can be inlined:
// numbers and strings always can; otherwise only non-dictionary JSObject
// maps without named interceptors qualify.
static bool CanInlinePropertyAccess(Type* type) {
  if (type->Is(Type::NumberOrString())) return true;
  if (!type->IsClass()) return false;
  Handle<Map> map = type->AsClass()->Map();
  return map->IsJSObjectMap() &&
      !map->is_dictionary_map() &&
      !map->has_named_interceptor();
}


// Determines whether the given array or object literal boilerplate satisfies
// all limits to be considered for fast deep-copying and computes the total
// size of all objects that are part of the graph.
static bool IsFastLiteral(Handle<JSObject> boilerplate,
                          int max_depth,
                          int* max_properties) {
  if (boilerplate->map()->is_deprecated() &&
      !JSObject::TryMigrateInstance(boilerplate)) {
    return false;
  }

  ASSERT(max_depth >= 0 && *max_properties >= 0);
  // Depth budget exhausted: too deeply nested for fast copying.
  if (max_depth == 0) return false;

  Isolate* isolate = boilerplate->GetIsolate();
  Handle<FixedArrayBase> elements(boilerplate->elements());
  if (elements->length() > 0 &&
      elements->map() != isolate->heap()->fixed_cow_array_map()) {
    if (boilerplate->HasFastObjectElements()) {
      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
      int length = elements->length();
      for (int i = 0; i < length; i++) {
        // Each element consumes one unit of the property budget.
        if ((*max_properties)-- == 0) return false;
        Handle<Object> value(fast_elements->get(i), isolate);
        if (value->IsJSObject()) {
          Handle<JSObject> value_object = Handle<JSObject>::cast(value);
          if (!IsFastLiteral(value_object,
                             max_depth - 1,
                             max_properties)) {
            return false;
          }
        }
      }
    } else if (!boilerplate->HasFastDoubleElements()) {
      return false;
    }
  }

  // Out-of-object (backing store) properties disqualify fast copying; only
  // in-object fields, scanned via the descriptors below, are allowed.
  Handle<FixedArray> properties(boilerplate->properties());
  if (properties->length() > 0) {
    return false;
  } else {
    // Recurse into every in-object FIELD property, charging each against the
    // shared property budget.
    Handle<DescriptorArray> descriptors(
        boilerplate->map()->instance_descriptors());
    int limit = boilerplate->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < limit; i++) {
      PropertyDetails details = descriptors->GetDetails(i);
      if (details.type() != FIELD) continue;
      int index = descriptors->GetFieldIndex(i);
      if ((*max_properties)-- == 0) return false;
      Handle<Object> value(boilerplate->InObjectPropertyAt(index), isolate);
      if (value->IsJSObject()) {
        Handle<JSObject> value_object = Handle<JSObject>::cast(value);
        if (!IsFastLiteral(value_object,
                           max_depth - 1,
                           max_properties)) {
          return false;
        }
      }
    }
  }
  return true;
}


// Translates an object literal: materialize the object either by fast
// deep-copy of its boilerplate or via the runtime, then emit stores for the
// non-compile-time properties.
void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  expr->BuildConstantProperties(isolate());
  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
  HInstruction* literal;

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  Handle<Object> literals_cell(closure->literals()->get(expr->literal_index()),
                               isolate());
  Handle<AllocationSite> site;
  Handle<JSObject> boilerplate;
  if (!literals_cell->IsUndefined()) {
    // Retrieve the boilerplate from the allocation site in the literals slot.
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
                                   isolate());
  }

  if (!boilerplate.is_null() &&
      IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
    AllocationSiteUsageContext usage_context(isolate(), site, false);
    usage_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate, &usage_context);
    usage_context.ExitScope(site, boilerplate);
  } else {
    // Slow path: call the runtime to create the literal.
    NoObservableSideEffectsScope no_effects(this);
    Handle<FixedArray> closure_literals(closure->literals(), isolate());
    Handle<FixedArray> constant_properties = expr->constant_properties();
    int literal_index = expr->literal_index();
    int flags = expr->fast_elements()
        ? ObjectLiteral::kFastElements : ObjectLiteral::kNoFlags;
    flags |= expr->has_function()
        ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags;

    Add<HPushArguments>(Add<HConstant>(closure_literals),
                        Add<HConstant>(literal_index),
                        Add<HConstant>(constant_properties),
                        Add<HConstant>(flags));

    // TODO(mvstanton): Add a flag to turn off creation of any
    // AllocationMementos for this call: we are in crankshaft and should have
    // learned enough about transition behavior to stop emitting mementos.
    Runtime::FunctionId function_id = Runtime::kHiddenCreateObjectLiteral;
    literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
                                Runtime::FunctionForId(function_id),
                                4);
  }

  // The object is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);

  expr->CalculateEmitStore(zone());

  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    // Compile-time values were already copied with the boilerplate.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();

    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            CHECK_ALIVE(VisitForValue(value));
            HValue* value = Pop();
            Handle<Map> map = property->GetReceiverType();
            Handle<String> name = property->key()->AsPropertyName();
            HInstruction* store;
            if (map.is_null()) {
              // If we don't know the monomorphic type, do a generic store.
              CHECK_ALIVE(store = BuildNamedGeneric(
                  STORE, literal, name, value));
            } else {
              PropertyAccessInfo info(this, STORE, ToType(map), name);
              if (info.CanAccessMonomorphic()) {
                HValue* checked_literal = Add<HCheckMaps>(literal, map);
                ASSERT(!info.lookup()->IsPropertyCallbacks());
                store = BuildMonomorphicAccess(
                    &info, literal, checked_literal, value,
                    BailoutId::None(), BailoutId::None());
              } else {
                CHECK_ALIVE(store = BuildNamedGeneric(
                    STORE, literal, name, value));
              }
            }
            AddInstruction(store);
            if (store->HasObservableSideEffects()) {
              Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);
            }
          } else {
            // No store needed, but evaluate the value for its side effects.
            CHECK_ALIVE(VisitForEffect(value));
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
      case ObjectLiteral::Property::SETTER:
      case ObjectLiteral::Property::GETTER:
        return Bailout(kObjectLiteralWithComplexProperty);
      default: UNREACHABLE();
    }
  }

  if (expr->has_function()) {
    // Return the result of the transformation to fast properties
    // instead of the original since this operation changes the map
    // of the object. This makes sure that the original object won't
    // be used by other optimized code before it is transformed
    // (e.g. because of code motion).
    HToFastProperties* result = Add<HToFastProperties>(Pop());
    return ast_context()->ReturnValue(result);
  } else {
    return ast_context()->ReturnValue(Pop());
  }
}


// Translates an array literal: create (or look up) the boilerplate and its
// allocation site, materialize the array by fast deep-copy or via the
// runtime, then store the non-compile-time elements.
void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  expr->BuildConstantElements(isolate());
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  HInstruction* literal;

  Handle<AllocationSite> site;
  Handle<FixedArray> literals(environment()->closure()->literals(), isolate());
  bool uninitialized = false;
  Handle<Object> literals_cell(literals->get(expr->literal_index()),
                               isolate());
  Handle<JSObject> boilerplate_object;
  if (literals_cell->IsUndefined()) {
    // First execution: create the boilerplate and an allocation site for it,
    // and cache the site in the literals array.
    uninitialized = true;
    Handle<Object> raw_boilerplate;
    ASSIGN_RETURN_ON_EXCEPTION_VALUE(
        isolate(), raw_boilerplate,
        Runtime::CreateArrayLiteralBoilerplate(
            isolate(), literals, expr->constant_elements()),
        Bailout(kArrayBoilerplateCreationFailed));

    boilerplate_object = Handle<JSObject>::cast(raw_boilerplate);
    AllocationSiteCreationContext creation_context(isolate());
    site = creation_context.EnterNewScope();
    if (JSObject::DeepWalk(boilerplate_object, &creation_context).is_null()) {
      return Bailout(kArrayBoilerplateCreationFailed);
    }
    creation_context.ExitScope(site, boilerplate_object);
    literals->set(expr->literal_index(), *site);

    if (boilerplate_object->elements()->map() ==
        isolate()->heap()->fixed_cow_array_map()) {
      isolate()->counters()->cow_arrays_created_runtime()->Increment();
    }
  } else {
    ASSERT(literals_cell->IsAllocationSite());
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate_object = Handle<JSObject>(
        JSObject::cast(site->transition_info()), isolate());
  }

  ASSERT(!boilerplate_object.is_null());
  ASSERT(site->SitePointsToLiteral());

  ElementsKind boilerplate_elements_kind =
      boilerplate_object->GetElementsKind();

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  if (IsFastLiteral(boilerplate_object,
                    kMaxFastLiteralDepth,
                    &max_properties)) {
    AllocationSiteUsageContext usage_context(isolate(), site, false);
    usage_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate_object, &usage_context);
    usage_context.ExitScope(site, boilerplate_object);
  } else {
    NoObservableSideEffectsScope no_effects(this);
    // Boilerplate already exists and constant elements are never accessed,
    // pass an empty fixed array to the runtime function instead.
    Handle<FixedArray> constants = isolate()->factory()->empty_fixed_array();
    int literal_index = expr->literal_index();
    int flags = expr->depth() == 1
        ? ArrayLiteral::kShallowElements
        : ArrayLiteral::kNoFlags;
    flags |= ArrayLiteral::kDisableMementos;

    Add<HPushArguments>(Add<HConstant>(literals),
                        Add<HConstant>(literal_index),
                        Add<HConstant>(constants),
                        Add<HConstant>(flags));

    // TODO(mvstanton): Consider a flag to turn off creation of any
    // AllocationMementos for this call: we are in crankshaft and should have
    // learned enough about transition behavior to stop emitting mementos.
    Runtime::FunctionId function_id = Runtime::kHiddenCreateArrayLiteral;
    literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
                                Runtime::FunctionForId(function_id),
                                4);

    // De-opt if elements kind changed from boilerplate_elements_kind.
    Handle<Map> map = Handle<Map>(boilerplate_object->map(), isolate());
    literal = Add<HCheckMaps>(literal, map);
  }

  // The array is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);
  // The literal index is on the stack, too.
  Push(Add<HConstant>(expr->literal_index()));

  HInstruction* elements = NULL;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    CHECK_ALIVE(VisitForValue(subexpr));
    HValue* value = Pop();
    if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);

    // Reload the backing store each iteration: the subexpression may have
    // had side effects.
    elements = AddLoadElements(literal);

    HValue* key = Add<HConstant>(i);

    switch (boilerplate_elements_kind) {
      case FAST_SMI_ELEMENTS:
      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_HOLEY_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_HOLEY_DOUBLE_ELEMENTS: {
        HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value,
                                              boilerplate_elements_kind);
        instr->SetUninitialized(uninitialized);
        break;
      }
      default:
        UNREACHABLE();
        break;
    }

    Add<HSimulate>(expr->GetIdForElement(i));
  }

  Drop(1);  // array literal index
  return ast_context()->ReturnValue(Pop());
}


// Adds a heap-object check followed by a map check on |object|.
HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
                                                Handle<Map> map) {
  BuildCheckHeapObject(object);
  return Add<HCheckMaps>(object, map);
}


// Builds the load of a named field described by |info| from an already
// map-checked receiver. Constant-folds loads of immutable (read-only,
// non-deletable, cacheable) properties on constant receivers, unwraps
// double fields via their heap number box, and attaches stable field maps
// when known.
HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object) {
  // See if this is a load for an immutable property
  if (checked_object->ActualValue()->IsConstant() &&
      info->lookup()->IsCacheable() &&
      info->lookup()->IsReadOnly() && info->lookup()->IsDontDelete()) {
    Handle<Object> object(
        HConstant::cast(checked_object->ActualValue())->handle(isolate()));

    if (object->IsJSObject()) {
      LookupResult lookup(isolate());
      Handle<JSObject>::cast(object)->Lookup(info->name(), &lookup);
      Handle<Object> value(lookup.GetLazyValue(), isolate());

      if (!value->IsTheHole()) {
        // Fold the whole load into a constant.
        return New<HConstant>(value);
      }
    }
  }

  HObjectAccess access = info->access();
  if (access.representation().IsDouble()) {
    // Load the heap number.
    checked_object = Add<HLoadNamedField>(
        checked_object, static_cast<HValue*>(NULL),
        access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }

  SmallMapList* map_list = info->field_maps();
  if (map_list->length() == 0) {
    return New<HLoadNamedField>(checked_object, checked_object, access);
  }

  // Known stable maps for the field's value; passed to the load so the
  // result can be typed more precisely.
  UniqueSet<Map>* maps = new(zone()) UniqueSet<Map>(map_list->length(), zone());
  for (int i = 0; i < map_list->length(); ++i) {
    maps->Add(Unique<Map>::CreateImmovable(map_list->at(i)), zone());
  }
  return New<HLoadNamedField>(
      checked_object, checked_object, access, maps, info->field_type());
}


// Builds the store of a named field (body continues beyond this chunk).
HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object,
    HValue* value) {
  bool transition_to_field = info->lookup()->IsTransition();
  // TODO(verwaest): Move this logic into PropertyAccessInfo.
5759 HObjectAccess field_access = info->access(); 5760 5761 HStoreNamedField *instr; 5762 if (field_access.representation().IsDouble()) { 5763 HObjectAccess heap_number_access = 5764 field_access.WithRepresentation(Representation::Tagged()); 5765 if (transition_to_field) { 5766 // The store requires a mutable HeapNumber to be allocated. 5767 NoObservableSideEffectsScope no_side_effects(this); 5768 HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize); 5769 5770 // TODO(hpayer): Allocation site pretenuring support. 5771 HInstruction* heap_number = Add<HAllocate>(heap_number_size, 5772 HType::HeapObject(), 5773 NOT_TENURED, 5774 HEAP_NUMBER_TYPE); 5775 AddStoreMapConstant(heap_number, isolate()->factory()->heap_number_map()); 5776 Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(), 5777 value); 5778 instr = New<HStoreNamedField>(checked_object->ActualValue(), 5779 heap_number_access, 5780 heap_number); 5781 } else { 5782 // Already holds a HeapNumber; load the box and write its value field. 5783 HInstruction* heap_number = Add<HLoadNamedField>( 5784 checked_object, static_cast<HValue*>(NULL), heap_number_access); 5785 instr = New<HStoreNamedField>(heap_number, 5786 HObjectAccess::ForHeapNumberValue(), 5787 value, STORE_TO_INITIALIZED_ENTRY); 5788 } 5789 } else { 5790 if (field_access.representation().IsHeapObject()) { 5791 BuildCheckHeapObject(value); 5792 } 5793 5794 if (!info->field_maps()->is_empty()) { 5795 ASSERT(field_access.representation().IsHeapObject()); 5796 value = Add<HCheckMaps>(value, info->field_maps()); 5797 } 5798 5799 // This is a normal store. 5800 instr = New<HStoreNamedField>( 5801 checked_object->ActualValue(), field_access, value, 5802 transition_to_field ? 
INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY); 5803 } 5804 5805 if (transition_to_field) { 5806 Handle<Map> transition(info->transition()); 5807 ASSERT(!transition->is_deprecated()); 5808 instr->SetTransition(Add<HConstant>(transition)); 5809 } 5810 return instr; 5811 } 5812 5813 5814 bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible( 5815 PropertyAccessInfo* info) { 5816 if (!CanInlinePropertyAccess(type_)) return false; 5817 5818 // Currently only handle Type::Number as a polymorphic case. 5819 // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber 5820 // instruction. 5821 if (type_->Is(Type::Number())) return false; 5822 5823 // Values are only compatible for monomorphic load if they all behave the same 5824 // regarding value wrappers. 5825 if (type_->Is(Type::NumberOrString())) { 5826 if (!info->type_->Is(Type::NumberOrString())) return false; 5827 } else { 5828 if (info->type_->Is(Type::NumberOrString())) return false; 5829 } 5830 5831 if (!LookupDescriptor()) return false; 5832 5833 if (!lookup_.IsFound()) { 5834 return (!info->lookup_.IsFound() || info->has_holder()) && 5835 map()->prototype() == info->map()->prototype(); 5836 } 5837 5838 // Mismatch if the other access info found the property in the prototype 5839 // chain. 
5840 if (info->has_holder()) return false; 5841 5842 if (lookup_.IsPropertyCallbacks()) { 5843 return accessor_.is_identical_to(info->accessor_) && 5844 api_holder_.is_identical_to(info->api_holder_); 5845 } 5846 5847 if (lookup_.IsConstant()) { 5848 return constant_.is_identical_to(info->constant_); 5849 } 5850 5851 ASSERT(lookup_.IsField()); 5852 if (!info->lookup_.IsField()) return false; 5853 5854 Representation r = access_.representation(); 5855 if (IsLoad()) { 5856 if (!info->access_.representation().IsCompatibleForLoad(r)) return false; 5857 } else { 5858 if (!info->access_.representation().IsCompatibleForStore(r)) return false; 5859 } 5860 if (info->access_.offset() != access_.offset()) return false; 5861 if (info->access_.IsInobject() != access_.IsInobject()) return false; 5862 if (IsLoad()) { 5863 if (field_maps_.is_empty()) { 5864 info->field_maps_.Clear(); 5865 } else if (!info->field_maps_.is_empty()) { 5866 for (int i = 0; i < field_maps_.length(); ++i) { 5867 info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone()); 5868 } 5869 info->field_maps_.Sort(); 5870 } 5871 } else { 5872 // We can only merge stores that agree on their field maps. The comparison 5873 // below is safe, since we keep the field maps sorted. 
5874 if (field_maps_.length() != info->field_maps_.length()) return false; 5875 for (int i = 0; i < field_maps_.length(); ++i) { 5876 if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) { 5877 return false; 5878 } 5879 } 5880 } 5881 info->GeneralizeRepresentation(r); 5882 info->field_type_ = info->field_type_.Combine(field_type_); 5883 return true; 5884 } 5885 5886 5887 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() { 5888 if (!type_->IsClass()) return true; 5889 map()->LookupDescriptor(NULL, *name_, &lookup_); 5890 return LoadResult(map()); 5891 } 5892 5893 5894 bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) { 5895 if (!IsLoad() && lookup_.IsProperty() && 5896 (lookup_.IsReadOnly() || !lookup_.IsCacheable())) { 5897 return false; 5898 } 5899 5900 if (lookup_.IsField()) { 5901 // Construct the object field access. 5902 access_ = HObjectAccess::ForField(map, &lookup_, name_); 5903 5904 // Load field map for heap objects. 5905 LoadFieldMaps(map); 5906 } else if (lookup_.IsPropertyCallbacks()) { 5907 Handle<Object> callback(lookup_.GetValueFromMap(*map), isolate()); 5908 if (!callback->IsAccessorPair()) return false; 5909 Object* raw_accessor = IsLoad() 5910 ? 
Handle<AccessorPair>::cast(callback)->getter() 5911 : Handle<AccessorPair>::cast(callback)->setter(); 5912 if (!raw_accessor->IsJSFunction()) return false; 5913 Handle<JSFunction> accessor = handle(JSFunction::cast(raw_accessor)); 5914 if (accessor->shared()->IsApiFunction()) { 5915 CallOptimization call_optimization(accessor); 5916 if (call_optimization.is_simple_api_call()) { 5917 CallOptimization::HolderLookup holder_lookup; 5918 Handle<Map> receiver_map = this->map(); 5919 api_holder_ = call_optimization.LookupHolderOfExpectedType( 5920 receiver_map, &holder_lookup); 5921 } 5922 } 5923 accessor_ = accessor; 5924 } else if (lookup_.IsConstant()) { 5925 constant_ = handle(lookup_.GetConstantFromMap(*map), isolate()); 5926 } 5927 5928 return true; 5929 } 5930 5931 5932 void HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps( 5933 Handle<Map> map) { 5934 // Clear any previously collected field maps/type. 5935 field_maps_.Clear(); 5936 field_type_ = HType::Tagged(); 5937 5938 // Figure out the field type from the accessor map. 5939 Handle<HeapType> field_type(lookup_.GetFieldTypeFromMap(*map), isolate()); 5940 5941 // Collect the (stable) maps from the field type. 5942 int num_field_maps = field_type->NumClasses(); 5943 if (num_field_maps == 0) return; 5944 ASSERT(access_.representation().IsHeapObject()); 5945 field_maps_.Reserve(num_field_maps, zone()); 5946 HeapType::Iterator<Map> it = field_type->Classes(); 5947 while (!it.Done()) { 5948 Handle<Map> field_map = it.Current(); 5949 if (!field_map->is_stable()) { 5950 field_maps_.Clear(); 5951 return; 5952 } 5953 field_maps_.Add(field_map, zone()); 5954 it.Advance(); 5955 } 5956 field_maps_.Sort(); 5957 ASSERT_EQ(num_field_maps, field_maps_.length()); 5958 5959 // Determine field HType from field HeapType. 5960 field_type_ = HType::FromType<HeapType>(field_type); 5961 ASSERT(field_type_.IsHeapObject()); 5962 5963 // Add dependency on the map that introduced the field. 
5964 Map::AddDependentCompilationInfo( 5965 handle(lookup_.GetFieldOwnerFromMap(*map), isolate()), 5966 DependentCode::kFieldTypeGroup, top_info()); 5967 } 5968 5969 5970 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() { 5971 Handle<Map> map = this->map(); 5972 5973 while (map->prototype()->IsJSObject()) { 5974 holder_ = handle(JSObject::cast(map->prototype())); 5975 if (holder_->map()->is_deprecated()) { 5976 JSObject::TryMigrateInstance(holder_); 5977 } 5978 map = Handle<Map>(holder_->map()); 5979 if (!CanInlinePropertyAccess(ToType(map))) { 5980 lookup_.NotFound(); 5981 return false; 5982 } 5983 map->LookupDescriptor(*holder_, *name_, &lookup_); 5984 if (lookup_.IsFound()) return LoadResult(map); 5985 } 5986 lookup_.NotFound(); 5987 return true; 5988 } 5989 5990 5991 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() { 5992 if (!CanInlinePropertyAccess(type_)) return false; 5993 if (IsJSObjectFieldAccessor()) return IsLoad(); 5994 if (!LookupDescriptor()) return false; 5995 if (lookup_.IsFound()) { 5996 if (IsLoad()) return true; 5997 return !lookup_.IsReadOnly() && lookup_.IsCacheable(); 5998 } 5999 if (!LookupInPrototypes()) return false; 6000 if (IsLoad()) return true; 6001 6002 if (lookup_.IsPropertyCallbacks()) return true; 6003 Handle<Map> map = this->map(); 6004 map->LookupTransition(NULL, *name_, &lookup_); 6005 if (lookup_.IsTransitionToField() && map->unused_property_fields() > 0) { 6006 // Construct the object field access. 6007 access_ = HObjectAccess::ForField(map, &lookup_, name_); 6008 6009 // Load field map for heap objects. 
6010 LoadFieldMaps(transition()); 6011 return true; 6012 } 6013 return false; 6014 } 6015 6016 6017 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic( 6018 SmallMapList* types) { 6019 ASSERT(type_->Is(ToType(types->first()))); 6020 if (!CanAccessMonomorphic()) return false; 6021 STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism); 6022 if (types->length() > kMaxLoadPolymorphism) return false; 6023 6024 HObjectAccess access = HObjectAccess::ForMap(); // bogus default 6025 if (GetJSObjectFieldAccess(&access)) { 6026 for (int i = 1; i < types->length(); ++i) { 6027 PropertyAccessInfo test_info( 6028 builder_, access_type_, ToType(types->at(i)), name_); 6029 HObjectAccess test_access = HObjectAccess::ForMap(); // bogus default 6030 if (!test_info.GetJSObjectFieldAccess(&test_access)) return false; 6031 if (!access.Equals(test_access)) return false; 6032 } 6033 return true; 6034 } 6035 6036 // Currently only handle Type::Number as a polymorphic case. 6037 // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber 6038 // instruction. 6039 if (type_->Is(Type::Number())) return false; 6040 6041 // Multiple maps cannot transition to the same target map. 
6042 ASSERT(!IsLoad() || !lookup_.IsTransition()); 6043 if (lookup_.IsTransition() && types->length() > 1) return false; 6044 6045 for (int i = 1; i < types->length(); ++i) { 6046 PropertyAccessInfo test_info( 6047 builder_, access_type_, ToType(types->at(i)), name_); 6048 if (!test_info.IsCompatible(this)) return false; 6049 } 6050 6051 return true; 6052 } 6053 6054 6055 static bool NeedsWrappingFor(Type* type, Handle<JSFunction> target) { 6056 return type->Is(Type::NumberOrString()) && 6057 target->shared()->strict_mode() == SLOPPY && 6058 !target->shared()->native(); 6059 } 6060 6061 6062 HInstruction* HOptimizedGraphBuilder::BuildMonomorphicAccess( 6063 PropertyAccessInfo* info, 6064 HValue* object, 6065 HValue* checked_object, 6066 HValue* value, 6067 BailoutId ast_id, 6068 BailoutId return_id, 6069 bool can_inline_accessor) { 6070 6071 HObjectAccess access = HObjectAccess::ForMap(); // bogus default 6072 if (info->GetJSObjectFieldAccess(&access)) { 6073 ASSERT(info->IsLoad()); 6074 return New<HLoadNamedField>(object, checked_object, access); 6075 } 6076 6077 HValue* checked_holder = checked_object; 6078 if (info->has_holder()) { 6079 Handle<JSObject> prototype(JSObject::cast(info->map()->prototype())); 6080 checked_holder = BuildCheckPrototypeMaps(prototype, info->holder()); 6081 } 6082 6083 if (!info->lookup()->IsFound()) { 6084 ASSERT(info->IsLoad()); 6085 return graph()->GetConstantUndefined(); 6086 } 6087 6088 if (info->lookup()->IsField()) { 6089 if (info->IsLoad()) { 6090 return BuildLoadNamedField(info, checked_holder); 6091 } else { 6092 return BuildStoreNamedField(info, checked_object, value); 6093 } 6094 } 6095 6096 if (info->lookup()->IsTransition()) { 6097 ASSERT(!info->IsLoad()); 6098 return BuildStoreNamedField(info, checked_object, value); 6099 } 6100 6101 if (info->lookup()->IsPropertyCallbacks()) { 6102 Push(checked_object); 6103 int argument_count = 1; 6104 if (!info->IsLoad()) { 6105 argument_count = 2; 6106 Push(value); 6107 } 6108 6109 if 
(NeedsWrappingFor(info->type(), info->accessor())) { 6110 HValue* function = Add<HConstant>(info->accessor()); 6111 PushArgumentsFromEnvironment(argument_count); 6112 return New<HCallFunction>(function, argument_count, WRAP_AND_CALL); 6113 } else if (FLAG_inline_accessors && can_inline_accessor) { 6114 bool success = info->IsLoad() 6115 ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id) 6116 : TryInlineSetter( 6117 info->accessor(), info->map(), ast_id, return_id, value); 6118 if (success || HasStackOverflow()) return NULL; 6119 } 6120 6121 PushArgumentsFromEnvironment(argument_count); 6122 return BuildCallConstantFunction(info->accessor(), argument_count); 6123 } 6124 6125 ASSERT(info->lookup()->IsConstant()); 6126 if (info->IsLoad()) { 6127 return New<HConstant>(info->constant()); 6128 } else { 6129 return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant())); 6130 } 6131 } 6132 6133 6134 void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess( 6135 PropertyAccessType access_type, 6136 BailoutId ast_id, 6137 BailoutId return_id, 6138 HValue* object, 6139 HValue* value, 6140 SmallMapList* types, 6141 Handle<String> name) { 6142 // Something did not match; must use a polymorphic load. 
6143 int count = 0; 6144 HBasicBlock* join = NULL; 6145 HBasicBlock* number_block = NULL; 6146 bool handled_string = false; 6147 6148 bool handle_smi = false; 6149 STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism); 6150 for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) { 6151 PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name); 6152 if (info.type()->Is(Type::String())) { 6153 if (handled_string) continue; 6154 handled_string = true; 6155 } 6156 if (info.CanAccessMonomorphic()) { 6157 count++; 6158 if (info.type()->Is(Type::Number())) { 6159 handle_smi = true; 6160 break; 6161 } 6162 } 6163 } 6164 6165 count = 0; 6166 HControlInstruction* smi_check = NULL; 6167 handled_string = false; 6168 6169 for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) { 6170 PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name); 6171 if (info.type()->Is(Type::String())) { 6172 if (handled_string) continue; 6173 handled_string = true; 6174 } 6175 if (!info.CanAccessMonomorphic()) continue; 6176 6177 if (count == 0) { 6178 join = graph()->CreateBasicBlock(); 6179 if (handle_smi) { 6180 HBasicBlock* empty_smi_block = graph()->CreateBasicBlock(); 6181 HBasicBlock* not_smi_block = graph()->CreateBasicBlock(); 6182 number_block = graph()->CreateBasicBlock(); 6183 smi_check = New<HIsSmiAndBranch>( 6184 object, empty_smi_block, not_smi_block); 6185 FinishCurrentBlock(smi_check); 6186 GotoNoSimulate(empty_smi_block, number_block); 6187 set_current_block(not_smi_block); 6188 } else { 6189 BuildCheckHeapObject(object); 6190 } 6191 } 6192 ++count; 6193 HBasicBlock* if_true = graph()->CreateBasicBlock(); 6194 HBasicBlock* if_false = graph()->CreateBasicBlock(); 6195 HUnaryControlInstruction* compare; 6196 6197 HValue* dependency; 6198 if (info.type()->Is(Type::Number())) { 6199 Handle<Map> heap_number_map = isolate()->factory()->heap_number_map(); 6200 compare = New<HCompareMap>(object, heap_number_map, 
if_true, if_false); 6201 dependency = smi_check; 6202 } else if (info.type()->Is(Type::String())) { 6203 compare = New<HIsStringAndBranch>(object, if_true, if_false); 6204 dependency = compare; 6205 } else { 6206 compare = New<HCompareMap>(object, info.map(), if_true, if_false); 6207 dependency = compare; 6208 } 6209 FinishCurrentBlock(compare); 6210 6211 if (info.type()->Is(Type::Number())) { 6212 GotoNoSimulate(if_true, number_block); 6213 if_true = number_block; 6214 } 6215 6216 set_current_block(if_true); 6217 6218 HInstruction* access = BuildMonomorphicAccess( 6219 &info, object, dependency, value, ast_id, 6220 return_id, FLAG_polymorphic_inlining); 6221 6222 HValue* result = NULL; 6223 switch (access_type) { 6224 case LOAD: 6225 result = access; 6226 break; 6227 case STORE: 6228 result = value; 6229 break; 6230 } 6231 6232 if (access == NULL) { 6233 if (HasStackOverflow()) return; 6234 } else { 6235 if (!access->IsLinked()) AddInstruction(access); 6236 if (!ast_context()->IsEffect()) Push(result); 6237 } 6238 6239 if (current_block() != NULL) Goto(join); 6240 set_current_block(if_false); 6241 } 6242 6243 // Finish up. Unconditionally deoptimize if we've handled all the maps we 6244 // know about and do not want to handle ones we've never seen. Otherwise 6245 // use a generic IC. 6246 if (count == types->length() && FLAG_deoptimize_uncommon_cases) { 6247 FinishExitWithHardDeoptimization("Uknown map in polymorphic access"); 6248 } else { 6249 HInstruction* instr = BuildNamedGeneric(access_type, object, name, value); 6250 AddInstruction(instr); 6251 if (!ast_context()->IsEffect()) Push(access_type == LOAD ? 
instr : value); 6252 6253 if (join != NULL) { 6254 Goto(join); 6255 } else { 6256 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE); 6257 if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop()); 6258 return; 6259 } 6260 } 6261 6262 ASSERT(join != NULL); 6263 if (join->HasPredecessor()) { 6264 join->SetJoinId(ast_id); 6265 set_current_block(join); 6266 if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop()); 6267 } else { 6268 set_current_block(NULL); 6269 } 6270 } 6271 6272 6273 static bool ComputeReceiverTypes(Expression* expr, 6274 HValue* receiver, 6275 SmallMapList** t, 6276 Zone* zone) { 6277 SmallMapList* types = expr->GetReceiverTypes(); 6278 *t = types; 6279 bool monomorphic = expr->IsMonomorphic(); 6280 if (types != NULL && receiver->HasMonomorphicJSObjectType()) { 6281 Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap(); 6282 types->FilterForPossibleTransitions(root_map); 6283 monomorphic = types->length() == 1; 6284 } 6285 return monomorphic && CanInlinePropertyAccess( 6286 IC::MapToType<Type>(types->first(), zone)); 6287 } 6288 6289 6290 static bool AreStringTypes(SmallMapList* types) { 6291 for (int i = 0; i < types->length(); i++) { 6292 if (types->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false; 6293 } 6294 return true; 6295 } 6296 6297 6298 void HOptimizedGraphBuilder::BuildStore(Expression* expr, 6299 Property* prop, 6300 BailoutId ast_id, 6301 BailoutId return_id, 6302 bool is_uninitialized) { 6303 if (!prop->key()->IsPropertyName()) { 6304 // Keyed store. 
6305 HValue* value = environment()->ExpressionStackAt(0); 6306 HValue* key = environment()->ExpressionStackAt(1); 6307 HValue* object = environment()->ExpressionStackAt(2); 6308 bool has_side_effects = false; 6309 HandleKeyedElementAccess(object, key, value, expr, 6310 STORE, &has_side_effects); 6311 Drop(3); 6312 Push(value); 6313 Add<HSimulate>(return_id, REMOVABLE_SIMULATE); 6314 return ast_context()->ReturnValue(Pop()); 6315 } 6316 6317 // Named store. 6318 HValue* value = Pop(); 6319 HValue* object = Pop(); 6320 6321 Literal* key = prop->key()->AsLiteral(); 6322 Handle<String> name = Handle<String>::cast(key->value()); 6323 ASSERT(!name.is_null()); 6324 6325 HInstruction* instr = BuildNamedAccess(STORE, ast_id, return_id, expr, 6326 object, name, value, is_uninitialized); 6327 if (instr == NULL) return; 6328 6329 if (!ast_context()->IsEffect()) Push(value); 6330 AddInstruction(instr); 6331 if (instr->HasObservableSideEffects()) { 6332 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE); 6333 } 6334 if (!ast_context()->IsEffect()) Drop(1); 6335 return ast_context()->ReturnValue(value); 6336 } 6337 6338 6339 void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) { 6340 Property* prop = expr->target()->AsProperty(); 6341 ASSERT(prop != NULL); 6342 CHECK_ALIVE(VisitForValue(prop->obj())); 6343 if (!prop->key()->IsPropertyName()) { 6344 CHECK_ALIVE(VisitForValue(prop->key())); 6345 } 6346 CHECK_ALIVE(VisitForValue(expr->value())); 6347 BuildStore(expr, prop, expr->id(), 6348 expr->AssignmentId(), expr->IsUninitialized()); 6349 } 6350 6351 6352 // Because not every expression has a position and there is not common 6353 // superclass of Assignment and CountOperation, we cannot just pass the 6354 // owning expression instead of position and ast_id separately. 
// Emits the store for an assignment to an unallocated (global) variable.
// If the global resolves to a property cell the store goes through the cell
// (with a constant-value guard when the cell's type is a known constant);
// otherwise a generic named store on the global object is emitted.
void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
    Variable* var,
    HValue* value,
    BailoutId ast_id) {
  LookupResult lookup(isolate());
  GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, STORE);
  if (type == kUseCell) {
    Handle<GlobalObject> global(current_info()->global_object());
    Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
    if (cell->type()->IsConstant()) {
      // The cell is believed to hold a constant; any store of a different
      // value must deoptimize so the cell's type can be updated.
      Handle<Object> constant = cell->type()->AsConstant()->Value();
      if (value->IsConstant()) {
        HConstant* c_value = HConstant::cast(value);
        if (!constant.is_identical_to(c_value->handle(isolate()))) {
          Add<HDeoptimize>("Constant global variable assignment",
                           Deoptimizer::EAGER);
        }
      } else {
        // Value is not known at compile time; emit a runtime equality check
        // that deoptimizes on mismatch.
        HValue* c_constant = Add<HConstant>(constant);
        IfBuilder builder(this);
        if (constant->IsNumber()) {
          builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
        } else {
          builder.If<HCompareObjectEqAndBranch>(value, c_constant);
        }
        builder.Then();
        builder.Else();
        Add<HDeoptimize>("Constant global variable assignment",
                         Deoptimizer::EAGER);
        builder.End();
      }
    }
    HInstruction* instr =
        Add<HStoreGlobalCell>(value, cell, lookup.GetPropertyDetails());
    if (instr->HasObservableSideEffects()) {
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    }
  } else {
    HValue* global_object = Add<HLoadNamedField>(
        context(), static_cast<HValue*>(NULL),
        HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
    HStoreNamedGeneric* instr =
        Add<HStoreNamedGeneric>(global_object, var->name(),
                                value, function_strict_mode());
    USE(instr);
    ASSERT(instr->HasObservableSideEffects());
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}


// Handles compound assignments (e.g. x += y): evaluates the binary operation
// via the recorded type feedback, then stores to a variable or property.
void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
  Expression* target = expr->target();
  VariableProxy* proxy = target->AsVariableProxy();
  Property* prop = target->AsProperty();
  ASSERT(proxy == NULL || prop == NULL);

  // We have a second position recorded in the FullCodeGenerator to have
  // type feedback for the binary operation.
  BinaryOperation* operation = expr->binary_operation();

  if (proxy != NULL) {
    Variable* var = proxy->var();
    if (var->mode() == LET) {
      return Bailout(kUnsupportedLetCompoundAssignment);
    }

    CHECK_ALIVE(VisitForValue(operation));

    switch (var->location()) {
      case Variable::UNALLOCATED:
        HandleGlobalVariableAssignment(var,
                                       Top(),
                                       expr->AssignmentId());
        break;

      case Variable::PARAMETER:
      case Variable::LOCAL:
        if (var->mode() == CONST_LEGACY) {
          return Bailout(kUnsupportedConstCompoundAssignment);
        }
        BindIfLive(var, Top());
        break;

      case Variable::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function
        // using the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will be allocated to context slots. We have no
          // direct way to detect that the variable is a parameter so we do
          // a linear search of the parameter variables.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
            }
          }
        }

        HStoreContextSlot::Mode mode;

        switch (var->mode()) {
          case LET:
            mode = HStoreContextSlot::kCheckDeoptimize;
            break;
          case CONST:
            // This case is checked statically so no need to
            // perform checks here
            UNREACHABLE();
            // Intentional fall-through (unreachable in practice).
          case CONST_LEGACY:
            return ast_context()->ReturnValue(Pop());
          default:
            mode = HStoreContextSlot::kNoCheck;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        break;
      }

      case Variable::LOOKUP:
        return Bailout(kCompoundAssignmentToLookupSlot);
    }
    return ast_context()->ReturnValue(Pop());

  } else if (prop != NULL) {
    // Compound assignment to a property: load, apply binary op, store back.
    CHECK_ALIVE(VisitForValue(prop->obj()));
    HValue* object = Top();
    HValue* key = NULL;
    if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) ||
        prop->IsStringAccess()) {
      CHECK_ALIVE(VisitForValue(prop->key()));
      key = Top();
    }

    CHECK_ALIVE(PushLoad(prop, object, key));

    CHECK_ALIVE(VisitForValue(expr->value()));
    HValue* right = Pop();
    HValue* left = Pop();

    Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));

    BuildStore(expr, prop, expr->id(),
               expr->AssignmentId(), expr->IsUninitialized());
  } else {
    return Bailout(kInvalidLhsInCompoundAssignment);
  }
}


// Visitor for assignment expressions: dispatches to compound/property
// handling, or stores directly to a variable based on its location and mode.
void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  ASSERT(proxy == NULL || prop == NULL);

  if (expr->is_compound()) {
    HandleCompoundAssignment(expr);
    return;
  }

  if (prop != NULL) {
    HandlePropertyAssignment(expr);
  } else if (proxy != NULL) {
    Variable* var = proxy->var();

    if (var->mode() == CONST) {
      if (expr->op() != Token::INIT_CONST) {
        return Bailout(kNonInitializerAssignmentToConst);
      }
    } else if (var->mode() == CONST_LEGACY) {
      // Non-initializing assignments to legacy const are silently ignored:
      // just evaluate the RHS and return it.
      if (expr->op() != Token::INIT_CONST_LEGACY) {
        CHECK_ALIVE(VisitForValue(expr->value()));
        return ast_context()->ReturnValue(Pop());
      }

      if (var->IsStackAllocated()) {
        // We insert a use of the old value to detect unsupported uses of const
        // variables (e.g. initialization inside a loop).
        HValue* old_value = environment()->Lookup(var);
        Add<HUseConst>(old_value);
      }
    }

    if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);

    // Handle the assignment.
    switch (var->location()) {
      case Variable::UNALLOCATED:
        CHECK_ALIVE(VisitForValue(expr->value()));
        HandleGlobalVariableAssignment(var,
                                       Top(),
                                       expr->AssignmentId());
        return ast_context()->ReturnValue(Pop());

      case Variable::PARAMETER:
      case Variable::LOCAL: {
        // Perform an initialization check for let declared variables
        // or parameters.
        if (var->mode() == LET && expr->op() == Token::ASSIGN) {
          HValue* env_value = environment()->Lookup(var);
          if (env_value == graph()->GetConstantHole()) {
            return Bailout(kAssignmentToLetVariableBeforeInitialization);
          }
        }
        // We do not allow the arguments object to occur in a context where it
        // may escape, but assignments to stack-allocated locals are
        // permitted.
        CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
        HValue* value = Pop();
        BindIfLive(var, value);
        return ast_context()->ReturnValue(value);
      }

      case Variable::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function using
        // the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots. We have no direct way
          // to detect that the variable is a parameter.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        CHECK_ALIVE(VisitForValue(expr->value()));
        HStoreContextSlot::Mode mode;
        if (expr->op() == Token::ASSIGN) {
          switch (var->mode()) {
            case LET:
              mode = HStoreContextSlot::kCheckDeoptimize;
              break;
            case CONST:
              // This case is checked statically so no need to
              // perform checks here
              UNREACHABLE();
              // Intentional fall-through (unreachable in practice).
            case CONST_LEGACY:
              return ast_context()->ReturnValue(Pop());
            default:
              mode = HStoreContextSlot::kNoCheck;
          }
        } else if (expr->op() == Token::INIT_VAR ||
                   expr->op() == Token::INIT_LET ||
                   expr->op() == Token::INIT_CONST) {
          mode = HStoreContextSlot::kNoCheck;
        } else {
          ASSERT(expr->op() == Token::INIT_CONST_LEGACY);

          mode = HStoreContextSlot::kCheckIgnoreAssignment;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        return ast_context()->ReturnValue(Pop());
      }

      case Variable::LOOKUP:
        return Bailout(kAssignmentToLOOKUPVariable);
    }
  } else {
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
}


void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
  // Generators are not optimized, so we should never get here.
  UNREACHABLE();
}


// Visitor for throw expressions: calls the hidden Throw runtime function.
void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // We don't optimize functions with invalid left-hand sides in
  // assignments, count operations, or for-in. Consequently throw can
  // currently only occur in an effect context.
  ASSERT(ast_context()->IsEffect());
  CHECK_ALIVE(VisitForValue(expr->exception()));

  HValue* value = environment()->Pop();
  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
  Add<HPushArguments>(value);
  Add<HCallRuntime>(isolate()->factory()->empty_string(),
                    Runtime::FunctionForId(Runtime::kHiddenThrow), 1);
  Add<HSimulate>(expr->id());

  // If the throw definitely exits the function, we can finish with a dummy
  // control flow at this point. This is not the case if the throw is inside
  // an inlined function which may be replaced.
  if (call_context() == NULL) {
    FinishExitCurrentBlock(New<HAbnormalExit>());
  }
}


// Loads the instance type of |string|'s map, folding to a constant when the
// string value is known at compile time.
HInstruction* HGraphBuilder::AddLoadStringInstanceType(HValue* string) {
  if (string->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    if (c_string->HasStringValue()) {
      return Add<HConstant>(c_string->StringValue()->map()->instance_type());
    }
  }
  return Add<HLoadNamedField>(
      Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMap()),
      static_cast<HValue*>(NULL), HObjectAccess::ForMapInstanceType());
}


// Loads |string|'s length, folding to a constant for known string values.
HInstruction* HGraphBuilder::AddLoadStringLength(HValue* string) {
  if (string->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    if (c_string->HasStringValue()) {
      return Add<HConstant>(c_string->StringValue()->length());
    }
  }
  return Add<HLoadNamedField>(string, static_cast<HValue*>(NULL),
                              HObjectAccess::ForStringLength());
}


// Builds a generic (IC-based) named load or store. Emits a soft deopt first
// when there is no type feedback yet, so the function is re-optimized once
// feedback becomes available.
HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
    PropertyAccessType access_type,
    HValue* object,
    Handle<String> name,
    HValue* value,
    bool is_uninitialized) {
  if (is_uninitialized) {
    Add<HDeoptimize>("Insufficient type feedback for generic named access",
                     Deoptimizer::SOFT);
  }
  if (access_type == LOAD) {
    return New<HLoadNamedGeneric>(object, name);
  } else {
    return New<HStoreNamedGeneric>(object, name, value, function_strict_mode());
  }
}



// Builds a generic (IC-based) keyed load or store.
HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
    PropertyAccessType access_type,
    HValue* object,
    HValue* key,
    HValue* value) {
  if (access_type == LOAD) {
    return New<HLoadKeyedGeneric>(object, key);
  } else {
    return New<HStoreKeyedGeneric>(object, key, value, function_strict_mode());
  }
}


LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map>
map) { 6727 // Loads from a "stock" fast holey double arrays can elide the hole check. 6728 LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE; 6729 if (*map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS) && 6730 isolate()->IsFastArrayConstructorPrototypeChainIntact()) { 6731 Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate()); 6732 Handle<JSObject> object_prototype = isolate()->initial_object_prototype(); 6733 BuildCheckPrototypeMaps(prototype, object_prototype); 6734 load_mode = ALLOW_RETURN_HOLE; 6735 graph()->MarkDependsOnEmptyArrayProtoElements(); 6736 } 6737 6738 return load_mode; 6739 } 6740 6741 6742 HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess( 6743 HValue* object, 6744 HValue* key, 6745 HValue* val, 6746 HValue* dependency, 6747 Handle<Map> map, 6748 PropertyAccessType access_type, 6749 KeyedAccessStoreMode store_mode) { 6750 HCheckMaps* checked_object = Add<HCheckMaps>(object, map, dependency); 6751 if (dependency) { 6752 checked_object->ClearDependsOnFlag(kElementsKind); 6753 } 6754 6755 if (access_type == STORE && map->prototype()->IsJSObject()) { 6756 // monomorphic stores need a prototype chain check because shape 6757 // changes could allow callbacks on elements in the chain that 6758 // aren't compatible with monomorphic keyed stores. 
6759 Handle<JSObject> prototype(JSObject::cast(map->prototype())); 6760 JSObject* holder = JSObject::cast(map->prototype()); 6761 while (!holder->GetPrototype()->IsNull()) { 6762 holder = JSObject::cast(holder->GetPrototype()); 6763 } 6764 6765 BuildCheckPrototypeMaps(prototype, 6766 Handle<JSObject>(JSObject::cast(holder))); 6767 } 6768 6769 LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map); 6770 return BuildUncheckedMonomorphicElementAccess( 6771 checked_object, key, val, 6772 map->instance_type() == JS_ARRAY_TYPE, 6773 map->elements_kind(), access_type, 6774 load_mode, store_mode); 6775 } 6776 6777 6778 HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad( 6779 HValue* object, 6780 HValue* key, 6781 HValue* val, 6782 SmallMapList* maps) { 6783 // For polymorphic loads of similar elements kinds (i.e. all tagged or all 6784 // double), always use the "worst case" code without a transition. This is 6785 // much faster than transitioning the elements to the worst case, trading a 6786 // HTransitionElements for a HCheckMaps, and avoiding mutation of the array. 6787 bool has_double_maps = false; 6788 bool has_smi_or_object_maps = false; 6789 bool has_js_array_access = false; 6790 bool has_non_js_array_access = false; 6791 bool has_seen_holey_elements = false; 6792 Handle<Map> most_general_consolidated_map; 6793 for (int i = 0; i < maps->length(); ++i) { 6794 Handle<Map> map = maps->at(i); 6795 if (!map->IsJSObjectMap()) return NULL; 6796 // Don't allow mixing of JSArrays with JSObjects. 6797 if (map->instance_type() == JS_ARRAY_TYPE) { 6798 if (has_non_js_array_access) return NULL; 6799 has_js_array_access = true; 6800 } else if (has_js_array_access) { 6801 return NULL; 6802 } else { 6803 has_non_js_array_access = true; 6804 } 6805 // Don't allow mixed, incompatible elements kinds. 
6806 if (map->has_fast_double_elements()) { 6807 if (has_smi_or_object_maps) return NULL; 6808 has_double_maps = true; 6809 } else if (map->has_fast_smi_or_object_elements()) { 6810 if (has_double_maps) return NULL; 6811 has_smi_or_object_maps = true; 6812 } else { 6813 return NULL; 6814 } 6815 // Remember if we've ever seen holey elements. 6816 if (IsHoleyElementsKind(map->elements_kind())) { 6817 has_seen_holey_elements = true; 6818 } 6819 // Remember the most general elements kind, the code for its load will 6820 // properly handle all of the more specific cases. 6821 if ((i == 0) || IsMoreGeneralElementsKindTransition( 6822 most_general_consolidated_map->elements_kind(), 6823 map->elements_kind())) { 6824 most_general_consolidated_map = map; 6825 } 6826 } 6827 if (!has_double_maps && !has_smi_or_object_maps) return NULL; 6828 6829 HCheckMaps* checked_object = Add<HCheckMaps>(object, maps); 6830 // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS. 6831 // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS. 6832 ElementsKind consolidated_elements_kind = has_seen_holey_elements 6833 ? 
GetHoleyElementsKind(most_general_consolidated_map->elements_kind()) 6834 : most_general_consolidated_map->elements_kind(); 6835 HInstruction* instr = BuildUncheckedMonomorphicElementAccess( 6836 checked_object, key, val, 6837 most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE, 6838 consolidated_elements_kind, 6839 LOAD, NEVER_RETURN_HOLE, STANDARD_STORE); 6840 return instr; 6841 } 6842 6843 6844 HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess( 6845 HValue* object, 6846 HValue* key, 6847 HValue* val, 6848 SmallMapList* maps, 6849 PropertyAccessType access_type, 6850 KeyedAccessStoreMode store_mode, 6851 bool* has_side_effects) { 6852 *has_side_effects = false; 6853 BuildCheckHeapObject(object); 6854 6855 if (access_type == LOAD) { 6856 HInstruction* consolidated_load = 6857 TryBuildConsolidatedElementLoad(object, key, val, maps); 6858 if (consolidated_load != NULL) { 6859 *has_side_effects |= consolidated_load->HasObservableSideEffects(); 6860 return consolidated_load; 6861 } 6862 } 6863 6864 // Elements_kind transition support. 6865 MapHandleList transition_target(maps->length()); 6866 // Collect possible transition targets. 6867 MapHandleList possible_transitioned_maps(maps->length()); 6868 for (int i = 0; i < maps->length(); ++i) { 6869 Handle<Map> map = maps->at(i); 6870 ElementsKind elements_kind = map->elements_kind(); 6871 if (IsFastElementsKind(elements_kind) && 6872 elements_kind != GetInitialFastElementsKind()) { 6873 possible_transitioned_maps.Add(map); 6874 } 6875 if (elements_kind == SLOPPY_ARGUMENTS_ELEMENTS) { 6876 HInstruction* result = BuildKeyedGeneric(access_type, object, key, val); 6877 *has_side_effects = result->HasObservableSideEffects(); 6878 return AddInstruction(result); 6879 } 6880 } 6881 // Get transition target for each map (NULL == no transition). 
6882 for (int i = 0; i < maps->length(); ++i) { 6883 Handle<Map> map = maps->at(i); 6884 Handle<Map> transitioned_map = 6885 map->FindTransitionedMap(&possible_transitioned_maps); 6886 transition_target.Add(transitioned_map); 6887 } 6888 6889 MapHandleList untransitionable_maps(maps->length()); 6890 HTransitionElementsKind* transition = NULL; 6891 for (int i = 0; i < maps->length(); ++i) { 6892 Handle<Map> map = maps->at(i); 6893 ASSERT(map->IsMap()); 6894 if (!transition_target.at(i).is_null()) { 6895 ASSERT(Map::IsValidElementsTransition( 6896 map->elements_kind(), 6897 transition_target.at(i)->elements_kind())); 6898 transition = Add<HTransitionElementsKind>(object, map, 6899 transition_target.at(i)); 6900 } else { 6901 untransitionable_maps.Add(map); 6902 } 6903 } 6904 6905 // If only one map is left after transitioning, handle this case 6906 // monomorphically. 6907 ASSERT(untransitionable_maps.length() >= 1); 6908 if (untransitionable_maps.length() == 1) { 6909 Handle<Map> untransitionable_map = untransitionable_maps[0]; 6910 HInstruction* instr = NULL; 6911 if (untransitionable_map->has_slow_elements_kind() || 6912 !untransitionable_map->IsJSObjectMap()) { 6913 instr = AddInstruction(BuildKeyedGeneric(access_type, object, key, val)); 6914 } else { 6915 instr = BuildMonomorphicElementAccess( 6916 object, key, val, transition, untransitionable_map, access_type, 6917 store_mode); 6918 } 6919 *has_side_effects |= instr->HasObservableSideEffects(); 6920 return access_type == STORE ? 
NULL : instr; 6921 } 6922 6923 HBasicBlock* join = graph()->CreateBasicBlock(); 6924 6925 for (int i = 0; i < untransitionable_maps.length(); ++i) { 6926 Handle<Map> map = untransitionable_maps[i]; 6927 if (!map->IsJSObjectMap()) continue; 6928 ElementsKind elements_kind = map->elements_kind(); 6929 HBasicBlock* this_map = graph()->CreateBasicBlock(); 6930 HBasicBlock* other_map = graph()->CreateBasicBlock(); 6931 HCompareMap* mapcompare = 6932 New<HCompareMap>(object, map, this_map, other_map); 6933 FinishCurrentBlock(mapcompare); 6934 6935 set_current_block(this_map); 6936 HInstruction* access = NULL; 6937 if (IsDictionaryElementsKind(elements_kind)) { 6938 access = AddInstruction(BuildKeyedGeneric(access_type, object, key, val)); 6939 } else { 6940 ASSERT(IsFastElementsKind(elements_kind) || 6941 IsExternalArrayElementsKind(elements_kind) || 6942 IsFixedTypedArrayElementsKind(elements_kind)); 6943 LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map); 6944 // Happily, mapcompare is a checked object. 6945 access = BuildUncheckedMonomorphicElementAccess( 6946 mapcompare, key, val, 6947 map->instance_type() == JS_ARRAY_TYPE, 6948 elements_kind, access_type, 6949 load_mode, 6950 store_mode); 6951 } 6952 *has_side_effects |= access->HasObservableSideEffects(); 6953 // The caller will use has_side_effects and add a correct Simulate. 6954 access->SetFlag(HValue::kHasNoObservableSideEffects); 6955 if (access_type == LOAD) { 6956 Push(access); 6957 } 6958 NoObservableSideEffectsScope scope(this); 6959 GotoNoSimulate(join); 6960 set_current_block(other_map); 6961 } 6962 6963 // Ensure that we visited at least one map above that goes to join. This is 6964 // necessary because FinishExitWithHardDeoptimization does an AbnormalExit 6965 // rather than joining the join block. If this becomes an issue, insert a 6966 // generic access in the case length() == 0. 6967 ASSERT(join->predecessors()->length() > 0); 6968 // Deopt if none of the cases matched. 
6969 NoObservableSideEffectsScope scope(this); 6970 FinishExitWithHardDeoptimization("Unknown map in polymorphic element access"); 6971 set_current_block(join); 6972 return access_type == STORE ? NULL : Pop(); 6973 } 6974 6975 6976 HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess( 6977 HValue* obj, 6978 HValue* key, 6979 HValue* val, 6980 Expression* expr, 6981 PropertyAccessType access_type, 6982 bool* has_side_effects) { 6983 ASSERT(!expr->IsPropertyName()); 6984 HInstruction* instr = NULL; 6985 6986 SmallMapList* types; 6987 bool monomorphic = ComputeReceiverTypes(expr, obj, &types, zone()); 6988 6989 bool force_generic = false; 6990 if (access_type == STORE && 6991 (monomorphic || (types != NULL && !types->is_empty()))) { 6992 // Stores can't be mono/polymorphic if their prototype chain has dictionary 6993 // elements. However a receiver map that has dictionary elements itself 6994 // should be left to normal mono/poly behavior (the other maps may benefit 6995 // from highly optimized stores). 
6996 for (int i = 0; i < types->length(); i++) { 6997 Handle<Map> current_map = types->at(i); 6998 if (current_map->DictionaryElementsInPrototypeChainOnly()) { 6999 force_generic = true; 7000 monomorphic = false; 7001 break; 7002 } 7003 } 7004 } 7005 7006 if (monomorphic) { 7007 Handle<Map> map = types->first(); 7008 if (map->has_slow_elements_kind() || !map->IsJSObjectMap()) { 7009 instr = AddInstruction(BuildKeyedGeneric(access_type, obj, key, val)); 7010 } else { 7011 BuildCheckHeapObject(obj); 7012 instr = BuildMonomorphicElementAccess( 7013 obj, key, val, NULL, map, access_type, expr->GetStoreMode()); 7014 } 7015 } else if (!force_generic && (types != NULL && !types->is_empty())) { 7016 return HandlePolymorphicElementAccess( 7017 obj, key, val, types, access_type, 7018 expr->GetStoreMode(), has_side_effects); 7019 } else { 7020 if (access_type == STORE) { 7021 if (expr->IsAssignment() && 7022 expr->AsAssignment()->HasNoTypeInformation()) { 7023 Add<HDeoptimize>("Insufficient type feedback for keyed store", 7024 Deoptimizer::SOFT); 7025 } 7026 } else { 7027 if (expr->AsProperty()->HasNoTypeInformation()) { 7028 Add<HDeoptimize>("Insufficient type feedback for keyed load", 7029 Deoptimizer::SOFT); 7030 } 7031 } 7032 instr = AddInstruction(BuildKeyedGeneric(access_type, obj, key, val)); 7033 } 7034 *has_side_effects = instr->HasObservableSideEffects(); 7035 return instr; 7036 } 7037 7038 7039 void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() { 7040 // Outermost function already has arguments on the stack. 7041 if (function_state()->outer() == NULL) return; 7042 7043 if (function_state()->arguments_pushed()) return; 7044 7045 // Push arguments when entering inlined function. 
7046 HEnterInlined* entry = function_state()->entry(); 7047 entry->set_arguments_pushed(); 7048 7049 HArgumentsObject* arguments = entry->arguments_object(); 7050 const ZoneList<HValue*>* arguments_values = arguments->arguments_values(); 7051 7052 HInstruction* insert_after = entry; 7053 for (int i = 0; i < arguments_values->length(); i++) { 7054 HValue* argument = arguments_values->at(i); 7055 HInstruction* push_argument = New<HPushArguments>(argument); 7056 push_argument->InsertAfter(insert_after); 7057 insert_after = push_argument; 7058 } 7059 7060 HArgumentsElements* arguments_elements = New<HArgumentsElements>(true); 7061 arguments_elements->ClearFlag(HValue::kUseGVN); 7062 arguments_elements->InsertAfter(insert_after); 7063 function_state()->set_arguments_elements(arguments_elements); 7064 } 7065 7066 7067 bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) { 7068 VariableProxy* proxy = expr->obj()->AsVariableProxy(); 7069 if (proxy == NULL) return false; 7070 if (!proxy->var()->IsStackAllocated()) return false; 7071 if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) { 7072 return false; 7073 } 7074 7075 HInstruction* result = NULL; 7076 if (expr->key()->IsPropertyName()) { 7077 Handle<String> name = expr->key()->AsLiteral()->AsPropertyName(); 7078 if (!name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("length"))) return false; 7079 7080 if (function_state()->outer() == NULL) { 7081 HInstruction* elements = Add<HArgumentsElements>(false); 7082 result = New<HArgumentsLength>(elements); 7083 } else { 7084 // Number of arguments without receiver. 7085 int argument_count = environment()-> 7086 arguments_environment()->parameter_count() - 1; 7087 result = New<HConstant>(argument_count); 7088 } 7089 } else { 7090 Push(graph()->GetArgumentsObject()); 7091 CHECK_ALIVE_OR_RETURN(VisitForValue(expr->key()), true); 7092 HValue* key = Pop(); 7093 Drop(1); // Arguments object. 
7094 if (function_state()->outer() == NULL) { 7095 HInstruction* elements = Add<HArgumentsElements>(false); 7096 HInstruction* length = Add<HArgumentsLength>(elements); 7097 HInstruction* checked_key = Add<HBoundsCheck>(key, length); 7098 result = New<HAccessArgumentsAt>(elements, length, checked_key); 7099 } else { 7100 EnsureArgumentsArePushedForAccess(); 7101 7102 // Number of arguments without receiver. 7103 HInstruction* elements = function_state()->arguments_elements(); 7104 int argument_count = environment()-> 7105 arguments_environment()->parameter_count() - 1; 7106 HInstruction* length = Add<HConstant>(argument_count); 7107 HInstruction* checked_key = Add<HBoundsCheck>(key, length); 7108 result = New<HAccessArgumentsAt>(elements, length, checked_key); 7109 } 7110 } 7111 ast_context()->ReturnInstruction(result, expr->id()); 7112 return true; 7113 } 7114 7115 7116 HInstruction* HOptimizedGraphBuilder::BuildNamedAccess( 7117 PropertyAccessType access, 7118 BailoutId ast_id, 7119 BailoutId return_id, 7120 Expression* expr, 7121 HValue* object, 7122 Handle<String> name, 7123 HValue* value, 7124 bool is_uninitialized) { 7125 SmallMapList* types; 7126 ComputeReceiverTypes(expr, object, &types, zone()); 7127 ASSERT(types != NULL); 7128 7129 if (types->length() > 0) { 7130 PropertyAccessInfo info(this, access, ToType(types->first()), name); 7131 if (!info.CanAccessAsMonomorphic(types)) { 7132 HandlePolymorphicNamedFieldAccess( 7133 access, ast_id, return_id, object, value, types, name); 7134 return NULL; 7135 } 7136 7137 HValue* checked_object; 7138 // Type::Number() is only supported by polymorphic load/call handling. 
7139 ASSERT(!info.type()->Is(Type::Number())); 7140 BuildCheckHeapObject(object); 7141 if (AreStringTypes(types)) { 7142 checked_object = 7143 Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING); 7144 } else { 7145 checked_object = Add<HCheckMaps>(object, types); 7146 } 7147 return BuildMonomorphicAccess( 7148 &info, object, checked_object, value, ast_id, return_id); 7149 } 7150 7151 return BuildNamedGeneric(access, object, name, value, is_uninitialized); 7152 } 7153 7154 7155 void HOptimizedGraphBuilder::PushLoad(Property* expr, 7156 HValue* object, 7157 HValue* key) { 7158 ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED); 7159 Push(object); 7160 if (key != NULL) Push(key); 7161 BuildLoad(expr, expr->LoadId()); 7162 } 7163 7164 7165 void HOptimizedGraphBuilder::BuildLoad(Property* expr, 7166 BailoutId ast_id) { 7167 HInstruction* instr = NULL; 7168 if (expr->IsStringAccess()) { 7169 HValue* index = Pop(); 7170 HValue* string = Pop(); 7171 HInstruction* char_code = BuildStringCharCodeAt(string, index); 7172 AddInstruction(char_code); 7173 instr = NewUncasted<HStringCharFromCode>(char_code); 7174 7175 } else if (expr->IsFunctionPrototype()) { 7176 HValue* function = Pop(); 7177 BuildCheckHeapObject(function); 7178 instr = New<HLoadFunctionPrototype>(function); 7179 7180 } else if (expr->key()->IsPropertyName()) { 7181 Handle<String> name = expr->key()->AsLiteral()->AsPropertyName(); 7182 HValue* object = Pop(); 7183 7184 instr = BuildNamedAccess(LOAD, ast_id, expr->LoadId(), expr, 7185 object, name, NULL, expr->IsUninitialized()); 7186 if (instr == NULL) return; 7187 if (instr->IsLinked()) return ast_context()->ReturnValue(instr); 7188 7189 } else { 7190 HValue* key = Pop(); 7191 HValue* obj = Pop(); 7192 7193 bool has_side_effects = false; 7194 HValue* load = HandleKeyedElementAccess( 7195 obj, key, NULL, expr, LOAD, &has_side_effects); 7196 if (has_side_effects) { 7197 if (ast_context()->IsEffect()) { 7198 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE); 
7199 } else { 7200 Push(load); 7201 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE); 7202 Drop(1); 7203 } 7204 } 7205 return ast_context()->ReturnValue(load); 7206 } 7207 return ast_context()->ReturnInstruction(instr, ast_id); 7208 } 7209 7210 7211 void HOptimizedGraphBuilder::VisitProperty(Property* expr) { 7212 ASSERT(!HasStackOverflow()); 7213 ASSERT(current_block() != NULL); 7214 ASSERT(current_block()->HasPredecessor()); 7215 7216 if (TryArgumentsAccess(expr)) return; 7217 7218 CHECK_ALIVE(VisitForValue(expr->obj())); 7219 if ((!expr->IsFunctionPrototype() && !expr->key()->IsPropertyName()) || 7220 expr->IsStringAccess()) { 7221 CHECK_ALIVE(VisitForValue(expr->key())); 7222 } 7223 7224 BuildLoad(expr, expr->id()); 7225 } 7226 7227 7228 HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant) { 7229 HCheckMaps* check = Add<HCheckMaps>( 7230 Add<HConstant>(constant), handle(constant->map())); 7231 check->ClearDependsOnFlag(kElementsKind); 7232 return check; 7233 } 7234 7235 7236 HInstruction* HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype, 7237 Handle<JSObject> holder) { 7238 while (holder.is_null() || !prototype.is_identical_to(holder)) { 7239 BuildConstantMapCheck(prototype); 7240 Object* next_prototype = prototype->GetPrototype(); 7241 if (next_prototype->IsNull()) return NULL; 7242 CHECK(next_prototype->IsJSObject()); 7243 prototype = handle(JSObject::cast(next_prototype)); 7244 } 7245 return BuildConstantMapCheck(prototype); 7246 } 7247 7248 7249 void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder, 7250 Handle<Map> receiver_map) { 7251 if (!holder.is_null()) { 7252 Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype())); 7253 BuildCheckPrototypeMaps(prototype, holder); 7254 } 7255 } 7256 7257 7258 HInstruction* HOptimizedGraphBuilder::NewPlainFunctionCall( 7259 HValue* fun, int argument_count, bool pass_argument_count) { 7260 return New<HCallJSFunction>( 7261 fun, argument_count, 
pass_argument_count); 7262 } 7263 7264 7265 HInstruction* HOptimizedGraphBuilder::NewArgumentAdaptorCall( 7266 HValue* fun, HValue* context, 7267 int argument_count, HValue* expected_param_count) { 7268 CallInterfaceDescriptor* descriptor = 7269 isolate()->call_descriptor(Isolate::ArgumentAdaptorCall); 7270 7271 HValue* arity = Add<HConstant>(argument_count - 1); 7272 7273 HValue* op_vals[] = { fun, context, arity, expected_param_count }; 7274 7275 Handle<Code> adaptor = 7276 isolate()->builtins()->ArgumentsAdaptorTrampoline(); 7277 HConstant* adaptor_value = Add<HConstant>(adaptor); 7278 7279 return New<HCallWithDescriptor>( 7280 adaptor_value, argument_count, descriptor, 7281 Vector<HValue*>(op_vals, descriptor->environment_length())); 7282 } 7283 7284 7285 HInstruction* HOptimizedGraphBuilder::BuildCallConstantFunction( 7286 Handle<JSFunction> jsfun, int argument_count) { 7287 HValue* target = Add<HConstant>(jsfun); 7288 // For constant functions, we try to avoid calling the 7289 // argument adaptor and instead call the function directly 7290 int formal_parameter_count = jsfun->shared()->formal_parameter_count(); 7291 bool dont_adapt_arguments = 7292 (formal_parameter_count == 7293 SharedFunctionInfo::kDontAdaptArgumentsSentinel); 7294 int arity = argument_count - 1; 7295 bool can_invoke_directly = 7296 dont_adapt_arguments || formal_parameter_count == arity; 7297 if (can_invoke_directly) { 7298 if (jsfun.is_identical_to(current_info()->closure())) { 7299 graph()->MarkRecursive(); 7300 } 7301 return NewPlainFunctionCall(target, argument_count, dont_adapt_arguments); 7302 } else { 7303 HValue* param_count_value = Add<HConstant>(formal_parameter_count); 7304 HValue* context = Add<HLoadNamedField>( 7305 target, static_cast<HValue*>(NULL), 7306 HObjectAccess::ForFunctionContextPointer()); 7307 return NewArgumentAdaptorCall(target, context, 7308 argument_count, param_count_value); 7309 } 7310 UNREACHABLE(); 7311 return NULL; 7312 } 7313 7314 7315 class FunctionSorter 
{ 7316 public: 7317 FunctionSorter(int index = 0, int ticks = 0, int size = 0) 7318 : index_(index), ticks_(ticks), size_(size) { } 7319 7320 int index() const { return index_; } 7321 int ticks() const { return ticks_; } 7322 int size() const { return size_; } 7323 7324 private: 7325 int index_; 7326 int ticks_; 7327 int size_; 7328 }; 7329 7330 7331 inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) { 7332 int diff = lhs.ticks() - rhs.ticks(); 7333 if (diff != 0) return diff > 0; 7334 return lhs.size() < rhs.size(); 7335 } 7336 7337 7338 void HOptimizedGraphBuilder::HandlePolymorphicCallNamed( 7339 Call* expr, 7340 HValue* receiver, 7341 SmallMapList* types, 7342 Handle<String> name) { 7343 int argument_count = expr->arguments()->length() + 1; // Includes receiver. 7344 FunctionSorter order[kMaxCallPolymorphism]; 7345 7346 bool handle_smi = false; 7347 bool handled_string = false; 7348 int ordered_functions = 0; 7349 7350 for (int i = 0; 7351 i < types->length() && ordered_functions < kMaxCallPolymorphism; 7352 ++i) { 7353 PropertyAccessInfo info(this, LOAD, ToType(types->at(i)), name); 7354 if (info.CanAccessMonomorphic() && 7355 info.lookup()->IsConstant() && 7356 info.constant()->IsJSFunction()) { 7357 if (info.type()->Is(Type::String())) { 7358 if (handled_string) continue; 7359 handled_string = true; 7360 } 7361 Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant()); 7362 if (info.type()->Is(Type::Number())) { 7363 handle_smi = true; 7364 } 7365 expr->set_target(target); 7366 order[ordered_functions++] = FunctionSorter( 7367 i, target->shared()->profiler_ticks(), InliningAstSize(target)); 7368 } 7369 } 7370 7371 std::sort(order, order + ordered_functions); 7372 7373 HBasicBlock* number_block = NULL; 7374 HBasicBlock* join = NULL; 7375 handled_string = false; 7376 int count = 0; 7377 7378 for (int fn = 0; fn < ordered_functions; ++fn) { 7379 int i = order[fn].index(); 7380 PropertyAccessInfo info(this, LOAD, 
ToType(types->at(i)), name); 7381 if (info.type()->Is(Type::String())) { 7382 if (handled_string) continue; 7383 handled_string = true; 7384 } 7385 // Reloads the target. 7386 info.CanAccessMonomorphic(); 7387 Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant()); 7388 7389 expr->set_target(target); 7390 if (count == 0) { 7391 // Only needed once. 7392 join = graph()->CreateBasicBlock(); 7393 if (handle_smi) { 7394 HBasicBlock* empty_smi_block = graph()->CreateBasicBlock(); 7395 HBasicBlock* not_smi_block = graph()->CreateBasicBlock(); 7396 number_block = graph()->CreateBasicBlock(); 7397 FinishCurrentBlock(New<HIsSmiAndBranch>( 7398 receiver, empty_smi_block, not_smi_block)); 7399 GotoNoSimulate(empty_smi_block, number_block); 7400 set_current_block(not_smi_block); 7401 } else { 7402 BuildCheckHeapObject(receiver); 7403 } 7404 } 7405 ++count; 7406 HBasicBlock* if_true = graph()->CreateBasicBlock(); 7407 HBasicBlock* if_false = graph()->CreateBasicBlock(); 7408 HUnaryControlInstruction* compare; 7409 7410 Handle<Map> map = info.map(); 7411 if (info.type()->Is(Type::Number())) { 7412 Handle<Map> heap_number_map = isolate()->factory()->heap_number_map(); 7413 compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false); 7414 } else if (info.type()->Is(Type::String())) { 7415 compare = New<HIsStringAndBranch>(receiver, if_true, if_false); 7416 } else { 7417 compare = New<HCompareMap>(receiver, map, if_true, if_false); 7418 } 7419 FinishCurrentBlock(compare); 7420 7421 if (info.type()->Is(Type::Number())) { 7422 GotoNoSimulate(if_true, number_block); 7423 if_true = number_block; 7424 } 7425 7426 set_current_block(if_true); 7427 7428 AddCheckPrototypeMaps(info.holder(), map); 7429 7430 HValue* function = Add<HConstant>(expr->target()); 7431 environment()->SetExpressionStackAt(0, function); 7432 Push(receiver); 7433 CHECK_ALIVE(VisitExpressions(expr->arguments())); 7434 bool needs_wrapping = NeedsWrappingFor(info.type(), target); 7435 bool 
try_inline = FLAG_polymorphic_inlining && !needs_wrapping; 7436 if (FLAG_trace_inlining && try_inline) { 7437 Handle<JSFunction> caller = current_info()->closure(); 7438 SmartArrayPointer<char> caller_name = 7439 caller->shared()->DebugName()->ToCString(); 7440 PrintF("Trying to inline the polymorphic call to %s from %s\n", 7441 name->ToCString().get(), 7442 caller_name.get()); 7443 } 7444 if (try_inline && TryInlineCall(expr)) { 7445 // Trying to inline will signal that we should bailout from the 7446 // entire compilation by setting stack overflow on the visitor. 7447 if (HasStackOverflow()) return; 7448 } else { 7449 // Since HWrapReceiver currently cannot actually wrap numbers and strings, 7450 // use the regular CallFunctionStub for method calls to wrap the receiver. 7451 // TODO(verwaest): Support creation of value wrappers directly in 7452 // HWrapReceiver. 7453 HInstruction* call = needs_wrapping 7454 ? NewUncasted<HCallFunction>( 7455 function, argument_count, WRAP_AND_CALL) 7456 : BuildCallConstantFunction(target, argument_count); 7457 PushArgumentsFromEnvironment(argument_count); 7458 AddInstruction(call); 7459 Drop(1); // Drop the function. 7460 if (!ast_context()->IsEffect()) Push(call); 7461 } 7462 7463 if (current_block() != NULL) Goto(join); 7464 set_current_block(if_false); 7465 } 7466 7467 // Finish up. Unconditionally deoptimize if we've handled all the maps we 7468 // know about and do not want to handle ones we've never seen. Otherwise 7469 // use a generic IC. 
7470 if (ordered_functions == types->length() && FLAG_deoptimize_uncommon_cases) { 7471 FinishExitWithHardDeoptimization("Unknown map in polymorphic call"); 7472 } else { 7473 Property* prop = expr->expression()->AsProperty(); 7474 HInstruction* function = BuildNamedGeneric( 7475 LOAD, receiver, name, NULL, prop->IsUninitialized()); 7476 AddInstruction(function); 7477 Push(function); 7478 AddSimulate(prop->LoadId(), REMOVABLE_SIMULATE); 7479 7480 environment()->SetExpressionStackAt(1, function); 7481 environment()->SetExpressionStackAt(0, receiver); 7482 CHECK_ALIVE(VisitExpressions(expr->arguments())); 7483 7484 CallFunctionFlags flags = receiver->type().IsJSObject() 7485 ? NO_CALL_FUNCTION_FLAGS : CALL_AS_METHOD; 7486 HInstruction* call = New<HCallFunction>( 7487 function, argument_count, flags); 7488 7489 PushArgumentsFromEnvironment(argument_count); 7490 7491 Drop(1); // Function. 7492 7493 if (join != NULL) { 7494 AddInstruction(call); 7495 if (!ast_context()->IsEffect()) Push(call); 7496 Goto(join); 7497 } else { 7498 return ast_context()->ReturnInstruction(call, expr->id()); 7499 } 7500 } 7501 7502 // We assume that control flow is always live after an expression. So 7503 // even without predecessors to the join block, we set it as the exit 7504 // block and continue by adding instructions there. 
  // (Tail of the polymorphic named-call handler: wire up the join block, if
  // any predecessor reached it, as the exit block of the expression.)
  ASSERT(join != NULL);
  if (join->HasPredecessor()) {
    set_current_block(join);
    join->SetJoinId(expr->id());
    if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
  } else {
    set_current_block(NULL);
  }
}


// Prints a one-line trace of an inlining decision when --trace_inlining is
// enabled.  A NULL |reason| means |target| was successfully inlined into
// |caller|; otherwise |reason| describes why inlining was rejected.
void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
                                         Handle<JSFunction> caller,
                                         const char* reason) {
  if (FLAG_trace_inlining) {
    SmartArrayPointer<char> target_name =
        target->shared()->DebugName()->ToCString();
    SmartArrayPointer<char> caller_name =
        caller->shared()->DebugName()->ToCString();
    if (reason == NULL) {
      PrintF("Inlined %s called from %s.\n", target_name.get(),
             caller_name.get());
    } else {
      PrintF("Did not inline %s called from %s (%s).\n",
             target_name.get(), caller_name.get(), reason);
    }
  }
}


// Sentinel node count returned by InliningAstSize() meaning "do not inline";
// chosen to exceed any Min()-clamped size limit.
static const int kNotInlinable = 1000000000;


// Returns the AST node count of |target| as the cost estimate for inlining
// it, or kNotInlinable if the target must not be inlined (inlining disabled,
// API function, source too large, or unsupported syntax).  Builtins marked
// inline_builtin() report a cost of 0 so they always pass the size limits.
int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
  if (!FLAG_use_inlining) return kNotInlinable;

  // Precondition: call is monomorphic and we have found a target with the
  // appropriate arity.
  Handle<JSFunction> caller = current_info()->closure();
  Handle<SharedFunctionInfo> target_shared(target->shared());

  // Always inline builtins marked for inlining.
  if (target->IsBuiltin()) {
    return target_shared->inline_builtin() ? 0 : kNotInlinable;
  }

  if (target_shared->IsApiFunction()) {
    TraceInline(target, caller, "target is api function");
    return kNotInlinable;
  }

  // Do a quick check on source code length to avoid parsing large
  // inlining candidates.
  if (target_shared->SourceSize() >
      Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
    TraceInline(target, caller, "target text too big");
    return kNotInlinable;
  }

  // Target must be inlineable.
  if (!target_shared->IsInlineable()) {
    TraceInline(target, caller, "target not inlineable");
    return kNotInlinable;
  }
  if (target_shared->dont_inline() || target_shared->dont_optimize()) {
    TraceInline(target, caller, "target contains unsupported syntax [early]");
    return kNotInlinable;
  }

  int nodes_added = target_shared->ast_node_count();
  return nodes_added;
}


// Attempts to inline a call to |target| at the current graph position.
// Returns false if |target| fails any inlining precondition (size limits,
// depth limit, recursion, parse failure, context-allocated variables, ...),
// in which case the caller should emit a regular call.  Returns true once
// the decision to inline has been made — even if graph construction for the
// inlinee subsequently bails out, in which case inline_bailout_ is set and
// the whole compilation is abandoned via the stack-overflow flag.
bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
                                       int arguments_count,
                                       HValue* implicit_return_value,
                                       BailoutId ast_id,
                                       BailoutId return_id,
                                       InliningKind inlining_kind,
                                       HSourcePosition position) {
  int nodes_added = InliningAstSize(target);
  if (nodes_added == kNotInlinable) return false;

  Handle<JSFunction> caller = current_info()->closure();

  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
    TraceInline(target, caller, "target AST is too large [early]");
    return false;
  }

  // Don't inline deeper than the maximum number of inlining levels.
  // The depth is the number of JS_FUNCTION frames on the environment chain.
  HEnvironment* env = environment();
  int current_level = 1;
  while (env->outer() != NULL) {
    if (current_level == FLAG_max_inlining_levels) {
      TraceInline(target, caller, "inline depth limit reached");
      return false;
    }
    if (env->outer()->frame_type() == JS_FUNCTION) {
      current_level++;
    }
    env = env->outer();
  }

  // Don't inline recursive functions: walk the chain of functions currently
  // being inlined and reject if |target| is already on it.
  for (FunctionState* state = function_state();
       state != NULL;
       state = state->outer()) {
    if (*state->compilation_info()->closure() == *target) {
      TraceInline(target, caller, "target is recursive");
      return false;
    }
  }

  // We don't want to add more than a certain number of nodes from inlining.
  if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
                           kUnlimitedMaxInlinedNodesCumulative)) {
    TraceInline(target, caller, "cumulative AST node limit reached");
    return false;
  }

  // Parse and allocate variables.
  CompilationInfo target_info(target, zone());
  Handle<SharedFunctionInfo> target_shared(target->shared());
  if (!Parser::Parse(&target_info) || !Scope::Analyze(&target_info)) {
    if (target_info.isolate()->has_pending_exception()) {
      // Parse or scope error, never optimize this function.
      SetStackOverflow();
      target_shared->DisableOptimization(kParseScopeError);
    }
    TraceInline(target, caller, "parse failure");
    return false;
  }

  if (target_info.scope()->num_heap_slots() > 0) {
    TraceInline(target, caller, "target has context-allocated variables");
    return false;
  }
  FunctionLiteral* function = target_info.function();

  // The following conditions must be checked again after re-parsing, because
  // earlier the information might not have been complete due to lazy parsing.
  nodes_added = function->ast_node_count();
  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
    TraceInline(target, caller, "target AST is too large [late]");
    return false;
  }
  AstProperties::Flags* flags(function->flags());
  if (flags->Contains(kDontInline) || function->dont_optimize()) {
    TraceInline(target, caller, "target contains unsupported syntax [late]");
    return false;
  }

  // If the function uses the arguments object check that inlining of functions
  // with arguments object is enabled and the arguments-variable is
  // stack allocated.
  if (function->scope()->arguments() != NULL) {
    if (!FLAG_inline_arguments) {
      TraceInline(target, caller, "target uses arguments object");
      return false;
    }

    if (!function->scope()->arguments()->IsStackAllocated()) {
      TraceInline(target,
                  caller,
                  "target uses non-stackallocated arguments object");
      return false;
    }
  }

  // All declarations must be inlineable.
  ZoneList<Declaration*>* decls = target_info.scope()->declarations();
  int decl_count = decls->length();
  for (int i = 0; i < decl_count; ++i) {
    if (!decls->at(i)->IsInlineable()) {
      TraceInline(target, caller, "target has non-trivial declaration");
      return false;
    }
  }

  // Generate the deoptimization data for the unoptimized version of
  // the target function if we don't already have it.
  if (!target_shared->has_deoptimization_support()) {
    // Note that we compile here using the same AST that we will use for
    // generating the optimized inline code.
    target_info.EnableDeoptimizationSupport();
    if (!FullCodeGenerator::MakeCode(&target_info)) {
      TraceInline(target, caller, "could not generate deoptimization info");
      return false;
    }
    if (target_shared->scope_info() == ScopeInfo::Empty(isolate())) {
      // The scope info might not have been set if a lazily compiled
      // function is inlined before being called for the first time.
      Handle<ScopeInfo> target_scope_info =
          ScopeInfo::Create(target_info.scope(), zone());
      target_shared->set_scope_info(*target_scope_info);
    }
    target_shared->EnableDeoptimizationSupport(*target_info.code());
    target_shared->set_feedback_vector(*target_info.feedback_vector());
    Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG,
                                        &target_info,
                                        target_shared);
  }

  // ----------------------------------------------------------------
  // After this point, we've made a decision to inline this function (so
  // TryInline should always return true).

  // Type-check the inlined function.
  ASSERT(target_shared->has_deoptimization_support());
  AstTyper::Run(&target_info);

  int function_id = graph()->TraceInlinedFunction(target_shared, position);

  // Save the pending call context. Set up new one for the inlined function.
  // The function state is new-allocated because we need to delete it
  // in two different places.
  FunctionState* target_state = new FunctionState(
      this, &target_info, inlining_kind, function_id);

  HConstant* undefined = graph()->GetConstantUndefined();

  HEnvironment* inner_env =
      environment()->CopyForInlining(target,
                                     arguments_count,
                                     function,
                                     undefined,
                                     function_state()->inlining_kind());

  HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
  inner_env->BindContext(context);

  HArgumentsObject* arguments_object = NULL;

  // If the function uses arguments object create and bind one, also copy
  // current arguments values to use them for materialization.
  if (function->scope()->arguments() != NULL) {
    ASSERT(function->scope()->arguments()->IsStackAllocated());
    HEnvironment* arguments_env = inner_env->arguments_environment();
    int arguments_count = arguments_env->parameter_count();
    arguments_object = Add<HArgumentsObject>(arguments_count);
    inner_env->Bind(function->scope()->arguments(), arguments_object);
    for (int i = 0; i < arguments_count; i++) {
      arguments_object->AddArgument(arguments_env->Lookup(i), zone());
    }
  }

  // Capture the state before invoking the inlined function for deopt in the
  // inlined function. This simulate has no bailout-id since it's not directly
  // reachable for deopt, and is only used to capture the state. If the simulate
  // becomes reachable by merging, the ast id of the simulate merged into it is
  // adopted.
  Add<HSimulate>(BailoutId::None());

  current_block()->UpdateEnvironment(inner_env);
  Scope* saved_scope = scope();
  set_scope(target_info.scope());
  HEnterInlined* enter_inlined =
      Add<HEnterInlined>(return_id, target, arguments_count, function,
                         function_state()->inlining_kind(),
                         function->scope()->arguments(),
                         arguments_object);
  function_state()->set_entry(enter_inlined);

  // Build the graph for the inlinee's body in the inner scope/environment.
  VisitDeclarations(target_info.scope()->declarations());
  VisitStatements(function->body());
  set_scope(saved_scope);
  if (HasStackOverflow()) {
    // Bail out if the inline function did, as we cannot residualize a call
    // instead.
    TraceInline(target, caller, "inline graph construction failed");
    target_shared->DisableOptimization(kInliningBailedOut);
    inline_bailout_ = true;
    delete target_state;
    return true;
  }

  // Update inlined nodes count.
  inlined_count_ += nodes_added;

  Handle<Code> unoptimized_code(target_shared->code());
  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
  Handle<TypeFeedbackInfo> type_info(
      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
  graph()->update_type_change_checksum(type_info->own_type_change_checksum());

  TraceInline(target, caller, NULL);

  // If control can fall off the end of the inlinee, route it to the proper
  // return target according to the kind of inlined call.
  if (current_block() != NULL) {
    FunctionState* state = function_state();
    if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
      // Falling off the end of an inlined construct call. In a test context the
      // return value will always evaluate to true, in a value context the
      // return value is the newly allocated receiver.
      if (call_context()->IsTest()) {
        Goto(inlined_test_context()->if_true(), state);
      } else if (call_context()->IsEffect()) {
        Goto(function_return(), state);
      } else {
        ASSERT(call_context()->IsValue());
        AddLeaveInlined(implicit_return_value, state);
      }
    } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
      // Falling off the end of an inlined setter call. The returned value is
      // never used, the value of an assignment is always the value of the RHS
      // of the assignment.
      if (call_context()->IsTest()) {
        inlined_test_context()->ReturnValue(implicit_return_value);
      } else if (call_context()->IsEffect()) {
        Goto(function_return(), state);
      } else {
        ASSERT(call_context()->IsValue());
        AddLeaveInlined(implicit_return_value, state);
      }
    } else {
      // Falling off the end of a normal inlined function. This basically means
      // returning undefined.
      if (call_context()->IsTest()) {
        Goto(inlined_test_context()->if_false(), state);
      } else if (call_context()->IsEffect()) {
        Goto(function_return(), state);
      } else {
        ASSERT(call_context()->IsValue());
        AddLeaveInlined(undefined, state);
      }
    }
  }

  // Fix up the function exits.
  if (inlined_test_context() != NULL) {
    HBasicBlock* if_true = inlined_test_context()->if_true();
    HBasicBlock* if_false = inlined_test_context()->if_false();

    HEnterInlined* entry = function_state()->entry();

    // Pop the return test context from the expression context stack.
    ASSERT(ast_context() == inlined_test_context());
    ClearInlinedTestContext();
    delete target_state;

    // Forward to the real test context.
    if (if_true->HasPredecessor()) {
      entry->RegisterReturnTarget(if_true, zone());
      if_true->SetJoinId(ast_id);
      HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
      Goto(if_true, true_target, function_state());
    }
    if (if_false->HasPredecessor()) {
      entry->RegisterReturnTarget(if_false, zone());
      if_false->SetJoinId(ast_id);
      HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
      Goto(if_false, false_target, function_state());
    }
    set_current_block(NULL);
    return true;

  } else if (function_return()->HasPredecessor()) {
    function_state()->entry()->RegisterReturnTarget(function_return(), zone());
    function_return()->SetJoinId(ast_id);
    set_current_block(function_return());
  } else {
    set_current_block(NULL);
  }
  delete target_state;
  return true;
}


// Inlines a normal (non-constructor) call expression if possible.
bool HOptimizedGraphBuilder::TryInlineCall(Call* expr) {
  return TryInline(expr->target(),
                   expr->arguments()->length(),
                   NULL,
                   expr->id(),
                   expr->ReturnId(),
                   NORMAL_RETURN,
                   ScriptPositionToSourcePosition(expr->position()));
}


// Inlines a constructor call; |implicit_return_value| is the newly
// allocated receiver, returned when the body falls off the end.
bool
HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
                                           HValue* implicit_return_value) {
  return TryInline(expr->target(),
                   expr->arguments()->length(),
                   implicit_return_value,
                   expr->id(),
                   expr->ReturnId(),
                   CONSTRUCT_CALL_RETURN,
                   ScriptPositionToSourcePosition(expr->position()));
}


// Inlines an accessor load: first as a direct API getter call if possible,
// otherwise as a regular inlined function with zero arguments.
bool HOptimizedGraphBuilder::TryInlineGetter(Handle<JSFunction> getter,
                                             Handle<Map> receiver_map,
                                             BailoutId ast_id,
                                             BailoutId return_id) {
  if (TryInlineApiGetter(getter, receiver_map, ast_id)) return true;
  return TryInline(getter,
                   0,
                   NULL,
                   ast_id,
                   return_id,
                   GETTER_CALL_RETURN,
                   source_position());
}


// Inlines an accessor store: first as a direct API setter call if possible,
// otherwise as an inlined function taking the assigned value as its single
// argument.  |implicit_return_value| is the RHS, which is the value of the
// whole assignment expression.
bool HOptimizedGraphBuilder::TryInlineSetter(Handle<JSFunction> setter,
                                             Handle<Map> receiver_map,
                                             BailoutId id,
                                             BailoutId assignment_id,
                                             HValue* implicit_return_value) {
  if (TryInlineApiSetter(setter, receiver_map, id)) return true;
  return TryInline(setter,
                   1,
                   implicit_return_value,
                   id, assignment_id,
                   SETTER_CALL_RETURN,
                   source_position());
}


// Inlines the target of a Function.prototype.apply-style call with a known
// argument count.
bool HOptimizedGraphBuilder::TryInlineApply(Handle<JSFunction> function,
                                            Call* expr,
                                            int arguments_count) {
  return TryInline(function,
                   arguments_count,
                   NULL,
                   expr->id(),
                   expr->ReturnId(),
                   NORMAL_RETURN,
                   ScriptPositionToSourcePosition(expr->position()));
}


// Replaces a direct call to a recognized builtin (e.g. Math.floor called as
// a plain function) with a dedicated Hydrogen instruction.  Returns true if
// the call was replaced; the arguments are already on the expression stack.
bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr) {
  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
  switch (id) {
    case kMathExp:
      if (!FLAG_fast_math) break;
      // Fall through if FLAG_fast_math.
    // The unary math builtins below share a single HUnaryMathOperation path.
    case kMathRound:
    case kMathFloor:
    case kMathAbs:
    case kMathSqrt:
    case kMathLog:
    case kMathClz32:
      if (expr->arguments()->length() == 1) {
        HValue* argument = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    case kMathImul:
      if (expr->arguments()->length() == 2) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* op = HMul::NewImul(zone(), context(), left, right);
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    default:
      // Not supported for inlining yet.
      break;
  }
  return false;
}


// Replaces a method call on a recognized builtin (Math.*, String.prototype
// charAt/charCodeAt/fromCharCode, Array.prototype pop/push/shift/indexOf/
// lastIndexOf) with dedicated Hydrogen instructions.  Returns true if the
// call was replaced.  Receiver, function and arguments are already on the
// expression stack; each case pops/drops exactly what it consumes.
bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
    Call* expr,
    HValue* receiver,
    Handle<Map> receiver_map) {
  // Try to inline calls like Math.* as operations in the calling function.
  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
  int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
  switch (id) {
    case kStringCharCodeAt:
    case kStringCharAt:
      if (argument_count == 2) {
        HValue* index = Pop();
        HValue* string = Pop();
        Drop(1);  // Function.
        HInstruction* char_code =
            BuildStringCharCodeAt(string, index);
        if (id == kStringCharCodeAt) {
          ast_context()->ReturnInstruction(char_code, expr->id());
          return true;
        }
        // charAt: additionally convert the char code back to a string.
        AddInstruction(char_code);
        HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kStringFromCharCode:
      if (argument_count == 2) {
        HValue* argument = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kMathExp:
      if (!FLAG_fast_math) break;
      // Fall through if FLAG_fast_math.
    case kMathRound:
    case kMathFloor:
    case kMathAbs:
    case kMathSqrt:
    case kMathLog:
    case kMathClz32:
      if (argument_count == 2) {
        HValue* argument = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    case kMathPow:
      if (argument_count == 3) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* result = NULL;
        // Use sqrt() if exponent is 0.5 or -0.5.
        if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
          double exponent = HConstant::cast(right)->DoubleValue();
          if (exponent == 0.5) {
            result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
          } else if (exponent == -0.5) {
            HValue* one = graph()->GetConstant1();
            HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
                left, kMathPowHalf);
            // MathPowHalf doesn't have side effects so there's no need for
            // an environment simulation here.
            ASSERT(!sqrt->HasObservableSideEffects());
            result = NewUncasted<HDiv>(one, sqrt);
          } else if (exponent == 2.0) {
            result = NewUncasted<HMul>(left, left);
          }
        }

        if (result == NULL) {
          // General case: emit a full power operation.
          result = NewUncasted<HPower>(left, right);
        }
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kMathMax:
    case kMathMin:
      if (argument_count == 3) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(2);  // Receiver and function.
        HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
                                                     : HMathMinMax::kMathMax;
        HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kMathImul:
      if (argument_count == 3) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* result = HMul::NewImul(zone(), context(), left, right);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kArrayPop: {
      // Fast path only for fast-elements, non-observed JSArrays.
      if (receiver_map.is_null()) return false;
      if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
      ElementsKind elements_kind = receiver_map->elements_kind();
      if (!IsFastElementsKind(elements_kind)) return false;
      if (receiver_map->is_observed()) return false;
      ASSERT(receiver_map->is_extensible());

      Drop(expr->arguments()->length());
      HValue* result;
      HValue* reduced_length;
      HValue* receiver = Pop();

      HValue* checked_object = AddCheckMap(receiver, receiver_map);
      HValue* length = Add<HLoadNamedField>(
          checked_object, static_cast<HValue*>(NULL),
          HObjectAccess::ForArrayLength(elements_kind));

      Drop(1);  // Function.

      { NoObservableSideEffectsScope scope(this);
        IfBuilder length_checker(this);

        // Empty array: pop() yields undefined and leaves the array alone.
        HValue* bounds_check = length_checker.If<HCompareNumericAndBranch>(
            length, graph()->GetConstant0(), Token::EQ);
        length_checker.Then();

        if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());

        length_checker.Else();
        HValue* elements = AddLoadElements(checked_object);
        // Ensure that we aren't popping from a copy-on-write array.
        if (IsFastSmiOrObjectElementsKind(elements_kind)) {
          elements = BuildCopyElementsOnWrite(checked_object, elements,
                                              elements_kind, length);
        }
        reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
        result = AddElementAccess(elements, reduced_length, NULL,
                                  bounds_check, elements_kind, LOAD);
        // Overwrite the popped slot with a hole (or hole-NaN for doubles).
        Factory* factory = isolate()->factory();
        double nan_double = FixedDoubleArray::hole_nan_as_double();
        HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
            ? Add<HConstant>(factory->the_hole_value())
            : Add<HConstant>(nan_double);
        if (IsFastSmiOrObjectElementsKind(elements_kind)) {
          elements_kind = FAST_HOLEY_ELEMENTS;
        }
        AddElementAccess(
            elements, reduced_length, hole, bounds_check, elements_kind, STORE);
        Add<HStoreNamedField>(
            checked_object, HObjectAccess::ForArrayLength(elements_kind),
            reduced_length, STORE_TO_INITIALIZED_ENTRY);

        if (!ast_context()->IsEffect()) Push(result);

        length_checker.End();
      }
      result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
      Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) Drop(1);

      ast_context()->ReturnValue(result);
      return true;
    }
    case kArrayPush: {
      // Fast path only for fast-elements, non-observed JSArrays with a
      // writable length.
      if (receiver_map.is_null()) return false;
      if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
      ElementsKind elements_kind = receiver_map->elements_kind();
      if (!IsFastElementsKind(elements_kind)) return false;
      if (receiver_map->is_observed()) return false;
      if (JSArray::IsReadOnlyLengthDescriptor(receiver_map)) return false;
      ASSERT(receiver_map->is_extensible());

      // If there may be elements accessors in the prototype chain, the fast
      // inlined version can't be used.
      if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
      // If there currently can be no elements accessors on the prototype chain,
      // it doesn't mean that there won't be any later. Install a full prototype
      // chain check to trap element accessors being installed on the prototype
      // chain, which would cause elements to go to dictionary mode and result
      // in a map change.
      Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
      BuildCheckPrototypeMaps(prototype, Handle<JSObject>());

      const int argc = expr->arguments()->length();
      if (argc != 1) return false;

      HValue* value_to_push = Pop();
      HValue* array = Pop();
      Drop(1);  // Drop function.

      HInstruction* new_size = NULL;
      HValue* length = NULL;

      {
        NoObservableSideEffectsScope scope(this);

        length = Add<HLoadNamedField>(array, static_cast<HValue*>(NULL),
          HObjectAccess::ForArrayLength(elements_kind));

        new_size = AddUncasted<HAdd>(length, graph()->GetConstant1());

        // Store at index |length|, growing the backing store as needed.
        bool is_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
        BuildUncheckedMonomorphicElementAccess(array, length,
                                               value_to_push, is_array,
                                               elements_kind, STORE,
                                               NEVER_RETURN_HOLE,
                                               STORE_AND_GROW_NO_TRANSITION);

        if (!ast_context()->IsEffect()) Push(new_size);
        Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
        if (!ast_context()->IsEffect()) Drop(1);
      }

      ast_context()->ReturnValue(new_size);
      return true;
    }
    case kArrayShift: {
      // Fast path only for fast-elements, non-observed JSArrays.
      if (receiver_map.is_null()) return false;
      if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
      ElementsKind kind = receiver_map->elements_kind();
      if (!IsFastElementsKind(kind)) return false;
      if (receiver_map->is_observed()) return false;
      ASSERT(receiver_map->is_extensible());

      // If there may be elements accessors in the prototype chain, the fast
      // inlined version can't be used.
      if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;

      // If there currently can be no elements accessors on the prototype chain,
      // it doesn't mean that there won't be any later. Install a full prototype
      // chain check to trap element accessors being installed on the prototype
      // chain, which would cause elements to go to dictionary mode and result
      // in a map change.
      BuildCheckPrototypeMaps(
          handle(JSObject::cast(receiver_map->prototype()), isolate()),
          Handle<JSObject>::null());

      // Threshold for fast inlined Array.shift().
      HConstant* inline_threshold = Add<HConstant>(static_cast<int32_t>(16));

      Drop(expr->arguments()->length());
      HValue* receiver = Pop();
      HValue* function = Pop();
      HValue* result;

      {
        NoObservableSideEffectsScope scope(this);

        HValue* length = Add<HLoadNamedField>(
            receiver, static_cast<HValue*>(NULL),
            HObjectAccess::ForArrayLength(kind));

        // Empty array: shift() yields undefined.
        IfBuilder if_lengthiszero(this);
        HValue* lengthiszero = if_lengthiszero.If<HCompareNumericAndBranch>(
            length, graph()->GetConstant0(), Token::EQ);
        if_lengthiszero.Then();
        {
          if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
        }
        if_lengthiszero.Else();
        {
          HValue* elements = AddLoadElements(receiver);

          // Check if we can use the fast inlined Array.shift().
          IfBuilder if_inline(this);
          if_inline.If<HCompareNumericAndBranch>(
              length, inline_threshold, Token::LTE);
          if (IsFastSmiOrObjectElementsKind(kind)) {
            // We cannot handle copy-on-write backing stores here.
            if_inline.AndIf<HCompareMap>(
                elements, isolate()->factory()->fixed_array_map());
          }
          if_inline.Then();
          {
            // Remember the result.
            if (!ast_context()->IsEffect()) {
              Push(AddElementAccess(elements, graph()->GetConstant0(), NULL,
                                    lengthiszero, kind, LOAD));
            }

            // Compute the new length.
            HValue* new_length = AddUncasted<HSub>(
                length, graph()->GetConstant1());
            new_length->ClearFlag(HValue::kCanOverflow);

            // Copy the remaining elements.
            LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
            {
              HValue* new_key = loop.BeginBody(
                  graph()->GetConstant0(), new_length, Token::LT);
              HValue* key = AddUncasted<HAdd>(new_key, graph()->GetConstant1());
              key->ClearFlag(HValue::kCanOverflow);
              HValue* element = AddUncasted<HLoadKeyed>(
                  elements, key, lengthiszero, kind, ALLOW_RETURN_HOLE);
              HStoreKeyed* store = Add<HStoreKeyed>(
                  elements, new_key, element, kind);
              store->SetFlag(HValue::kAllowUndefinedAsNaN);
            }
            loop.EndBody();

            // Put a hole at the end.
            HValue* hole = IsFastSmiOrObjectElementsKind(kind)
                ? Add<HConstant>(isolate()->factory()->the_hole_value())
                : Add<HConstant>(FixedDoubleArray::hole_nan_as_double());
            if (IsFastSmiOrObjectElementsKind(kind)) kind = FAST_HOLEY_ELEMENTS;
            Add<HStoreKeyed>(
                elements, new_length, hole, kind, INITIALIZING_STORE);

            // Remember new length.
            Add<HStoreNamedField>(
                receiver, HObjectAccess::ForArrayLength(kind),
                new_length, STORE_TO_INITIALIZED_ENTRY);
          }
          if_inline.Else();
          {
            // Too large (or copy-on-write): fall back to calling the actual
            // Array.prototype.shift function.
            Add<HPushArguments>(receiver);
            result = Add<HCallJSFunction>(function, 1, true);
            if (!ast_context()->IsEffect()) Push(result);
          }
          if_inline.End();
        }
        if_lengthiszero.End();
      }
      result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
      Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) Drop(1);
      ast_context()->ReturnValue(result);
      return true;
    }
    case kArrayIndexOf:
    case kArrayLastIndexOf: {
      // Fast path only for fast-elements, non-observed JSArrays with exactly
      // one search argument.
      if (receiver_map.is_null()) return false;
      if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
      ElementsKind kind = receiver_map->elements_kind();
      if (!IsFastElementsKind(kind)) return false;
      if (receiver_map->is_observed()) return false;
      if (argument_count != 2) return false;
      ASSERT(receiver_map->is_extensible());

      // If there may be elements accessors in the prototype chain, the fast
      // inlined version can't be used.
      if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;

      // If there currently can be no elements accessors on the prototype chain,
      // it doesn't mean that there won't be any later. Install a full prototype
      // chain check to trap element accessors being installed on the prototype
      // chain, which would cause elements to go to dictionary mode and result
      // in a map change.
      BuildCheckPrototypeMaps(
          handle(JSObject::cast(receiver_map->prototype()), isolate()),
          Handle<JSObject>::null());

      HValue* search_element = Pop();
      HValue* receiver = Pop();
      Drop(1);  // Drop function.

      ArrayIndexOfMode mode = (id == kArrayIndexOf)
          ? kFirstIndexOf : kLastIndexOf;
      HValue* index = BuildArrayIndexOf(receiver, search_element, kind, mode);

      if (!ast_context()->IsEffect()) Push(index);
      Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) Drop(1);
      ast_context()->ReturnValue(index);
      return true;
    }
    default:
      // Not yet supported for inlining.
      break;
  }
  return false;
}


// Inlines a direct (non-method) call to a simple API function.  The receiver
// maps are derived from the function's global context.
bool HOptimizedGraphBuilder::TryInlineApiFunctionCall(Call* expr,
                                                      HValue* receiver) {
  Handle<JSFunction> function = expr->target();
  int argc = expr->arguments()->length();
  SmallMapList receiver_maps;
  return TryInlineApiCall(function,
                          receiver,
                          &receiver_maps,
                          argc,
                          expr->id(),
                          kCallApiFunction);
}


// Inlines a method call to a simple API function with known receiver maps.
bool HOptimizedGraphBuilder::TryInlineApiMethodCall(
    Call* expr,
    HValue* receiver,
    SmallMapList* receiver_maps) {
  Handle<JSFunction> function = expr->target();
  int argc = expr->arguments()->length();
  return TryInlineApiCall(function,
                          receiver,
                          receiver_maps,
                          argc,
                          expr->id(),
                          kCallApiMethod);
}


// Inlines an API accessor load; the receiver is already on the expression
// stack, so no explicit receiver value is passed.
bool HOptimizedGraphBuilder::TryInlineApiGetter(Handle<JSFunction> function,
                                                Handle<Map> receiver_map,
                                                BailoutId ast_id) {
  SmallMapList receiver_maps(1, zone());
  receiver_maps.Add(receiver_map, zone());
  return TryInlineApiCall(function,
                          NULL,  // Receiver is on expression stack.
                          &receiver_maps,
                          0,
                          ast_id,
                          kCallApiGetter);
}


// Inlines an API accessor store; receiver and value are already on the
// expression stack.
bool HOptimizedGraphBuilder::TryInlineApiSetter(Handle<JSFunction> function,
                                                Handle<Map> receiver_map,
                                                BailoutId ast_id) {
  SmallMapList receiver_maps(1, zone());
  receiver_maps.Add(receiver_map, zone());
  return TryInlineApiCall(function,
                          NULL,  // Receiver is on expression stack.
                          &receiver_maps,
                          1,
                          ast_id,
                          kCallApiSetter);
}


// Shared implementation for the TryInlineApi* helpers above: emits a direct
// CallApiFunctionStub call when |function| is a simple API call whose holder
// can be resolved from the receiver maps.
bool HOptimizedGraphBuilder::TryInlineApiCall(Handle<JSFunction> function,
                                               HValue* receiver,
                                               SmallMapList* receiver_maps,
                                               int argc,
                                               BailoutId ast_id,
                                               ApiCallType call_type) {
  CallOptimization optimization(function);
  if (!optimization.is_simple_api_call()) return false;
  Handle<Map> holder_map;
  if (call_type == kCallApiFunction) {
    // Cannot embed a direct reference to the global proxy map
    // as it maybe dropped on deserialization.
    CHECK(!isolate()->serializer_enabled());
    ASSERT_EQ(0, receiver_maps->length());
    receiver_maps->Add(handle(
        function->context()->global_object()->global_receiver()->map()),
        zone());
  }
  CallOptimization::HolderLookup holder_lookup =
      CallOptimization::kHolderNotFound;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
      receiver_maps->first(), &holder_lookup);
  if (holder_lookup == CallOptimization::kHolderNotFound) return false;

  if (FLAG_trace_inlining) {
    PrintF("Inlining api function ");
    function->ShortPrint();
    PrintF("\n");
  }

  bool drop_extra = false;
  bool is_store = false;
  switch (call_type) {
    case kCallApiFunction:
    case kCallApiMethod:
      // Need to check that none of the receiver maps could have changed.
      Add<HCheckMaps>(receiver, receiver_maps);
      // Need to ensure the chain between receiver and api_holder is intact.
      if (holder_lookup == CallOptimization::kHolderFound) {
        AddCheckPrototypeMaps(api_holder, receiver_maps->first());
      } else {
        ASSERT_EQ(holder_lookup, CallOptimization::kHolderIsReceiver);
      }
      // Includes receiver.
      PushArgumentsFromEnvironment(argc + 1);
      // Drop function after call.
      drop_extra = true;
      break;
    case kCallApiGetter:
      // Receiver and prototype chain cannot have changed.
      ASSERT_EQ(0, argc);
      ASSERT_EQ(NULL, receiver);
      // Receiver is on expression stack.
      receiver = Pop();
      Add<HPushArguments>(receiver);
      break;
    case kCallApiSetter:
      {
        is_store = true;
        // Receiver and prototype chain cannot have changed.
        ASSERT_EQ(1, argc);
        ASSERT_EQ(NULL, receiver);
        // Receiver and value are on expression stack.
        HValue* value = Pop();
        receiver = Pop();
        Add<HPushArguments>(receiver, value);
        break;
      }
  }

  // Materialize the holder that the callback expects as its receiver.
  HValue* holder = NULL;
  switch (holder_lookup) {
    case CallOptimization::kHolderFound:
      holder = Add<HConstant>(api_holder);
      break;
    case CallOptimization::kHolderIsReceiver:
      holder = receiver;
      break;
    case CallOptimization::kHolderNotFound:
      // Ruled out earlier by the early return after LookupHolderOfExpectedType.
      UNREACHABLE();
      break;
  }
  // Gather the pieces the CallApiFunctionStub needs: the embedder's call
  // data and the raw address of the C++ callback.
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate());
  bool call_data_is_undefined = call_data_obj->IsUndefined();
  HValue* call_data = Add<HConstant>(call_data_obj);
  ApiFunction fun(v8::ToCData<Address>(api_call_info->callback()));
  ExternalReference ref = ExternalReference(&fun,
                                            ExternalReference::DIRECT_API_CALL,
                                            isolate());
  HValue* api_function_address = Add<HConstant>(ExternalReference(ref));

  // Operand order must match the ApiFunctionCall interface descriptor.
  HValue* op_vals[] = {
    Add<HConstant>(function),
    call_data,
    holder,
    api_function_address,
    context()
  };

  CallInterfaceDescriptor* descriptor =
      isolate()->call_descriptor(Isolate::ApiFunctionCall);

  CallApiFunctionStub stub(isolate(), is_store, call_data_is_undefined, argc);
  Handle<Code> code = stub.GetCode();
  HConstant* code_value = Add<HConstant>(code);

  ASSERT((sizeof(op_vals) / kPointerSize) ==
         descriptor->environment_length());

  HInstruction* call = New<HCallWithDescriptor>(
      code_value, argc + 1, descriptor,
      Vector<HValue*>(op_vals, descriptor->environment_length()));

  if (drop_extra) Drop(1);  // Drop function.
  ast_context()->ReturnInstruction(call, ast_id);
  return true;
}


// Recognizes the pattern f.apply(receiver, arguments) — where `arguments` is
// the current function's arguments object — and compiles it without
// materializing the arguments object.  Returns false (leaving the expression
// stack untouched) if the pattern does not match.
bool HOptimizedGraphBuilder::TryCallApply(Call* expr) {
  ASSERT(expr->expression()->IsProperty());

  if (!expr->IsMonomorphic()) {
    return false;
  }
  // The callee must be a JSFunction whose target is the Function.prototype
  // .apply builtin.
  Handle<Map> function_map = expr->GetReceiverTypes()->first();
  if (function_map->instance_type() != JS_FUNCTION_TYPE ||
      !expr->target()->shared()->HasBuiltinFunctionId() ||
      expr->target()->shared()->builtin_function_id() != kFunctionApply) {
    return false;
  }

  if (current_info()->scope()->arguments() == NULL) return false;

  ZoneList<Expression*>* args = expr->arguments();
  if (args->length() != 2) return false;

  // The second argument must be the (stack-allocated) arguments object.
  VariableProxy* arg_two = args->at(1)->AsVariableProxy();
  if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
  HValue* arg_two_value = LookupAndMakeLive(arg_two->var());
  if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;

  // Found pattern f.apply(receiver, arguments).
  CHECK_ALIVE_OR_RETURN(VisitForValue(args->at(0)), true);
  HValue* receiver = Pop();  // receiver
  HValue* function = Pop();  // f
  Drop(1);  // apply

  HValue* checked_function = AddCheckMap(function, function_map);

  if (function_state()->outer() == NULL) {
    // Not inlined: read the actual arguments adaptor frame at runtime.
    HInstruction* elements = Add<HArgumentsElements>(false);
    HInstruction* length = Add<HArgumentsLength>(elements);
    HValue* wrapped_receiver = BuildWrapReceiver(receiver, checked_function);
    HInstruction* result = New<HApplyArguments>(function,
                                                wrapped_receiver,
                                                length,
                                                elements);
    ast_context()->ReturnInstruction(result, expr->id());
    return true;
  } else {
    // We are inside inlined function and we know exactly what is inside
    // arguments object.
    // But we need to be able to materialize at deopt.
    ASSERT_EQ(environment()->arguments_environment()->parameter_count(),
              function_state()->entry()->arguments_object()->arguments_count());
    HArgumentsObject* args = function_state()->entry()->arguments_object();
    const ZoneList<HValue*>* arguments_values = args->arguments_values();
    int arguments_count = arguments_values->length();
    // Rebuild the call frame on the expression stack: function, wrapped
    // receiver, then every argument except the original receiver (index 0).
    Push(function);
    Push(BuildWrapReceiver(receiver, checked_function));
    for (int i = 1; i < arguments_count; i++) {
      Push(arguments_values->at(i));
    }

    Handle<JSFunction> known_function;
    if (function->IsConstant() &&
        HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
      known_function = Handle<JSFunction>::cast(
          HConstant::cast(function)->handle(isolate()));
      int args_count = arguments_count - 1;  // Excluding receiver.
      // With a statically known callee, try to inline the applied function.
      if (TryInlineApply(known_function, expr, args_count)) return true;
    }

    PushArgumentsFromEnvironment(arguments_count);
    HInvokeFunction* call = New<HInvokeFunction>(
        function, known_function, arguments_count);
    Drop(1);  // Function.
    ast_context()->ReturnInstruction(call, expr->id());
    return true;
  }
}


// Returns the implicit receiver to use when calling |target| without an
// explicit one: the global receiver for sloppy-mode non-native functions,
// undefined otherwise.
HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function,
                                                    Handle<JSFunction> target) {
  SharedFunctionInfo* shared = target->shared();
  if (shared->strict_mode() == SLOPPY && !shared->native()) {
    // Cannot embed a direct reference to the global proxy
    // as it is dropped on deserialization.
    CHECK(!isolate()->serializer_enabled());
    Handle<JSObject> global_receiver(
        target->context()->global_object()->global_receiver());
    return Add<HConstant>(global_receiver);
  }
  return graph()->GetConstantUndefined();
}


// Compiles a call to the Array function: either fully inlined array
// allocation (when IsCallArrayInlineable approves) or an HCallNewArray stub
// call specialized on the site's elements kind.
void HOptimizedGraphBuilder::BuildArrayCall(Expression* expression,
                                            int arguments_count,
                                            HValue* function,
                                            Handle<AllocationSite> site) {
  // Deoptimize if the callee turns out not to be the Array function.
  Add<HCheckValue>(function, array_function());

  if (IsCallArrayInlineable(arguments_count, site)) {
    BuildInlinedCallArray(expression, arguments_count, site);
    return;
  }

  HInstruction* call = PreProcessCall(New<HCallNewArray>(
      function, arguments_count + 1, site->GetElementsKind()));
  if (expression->IsCall()) {
    // A plain call (as opposed to `new`) has an extra receiver slot to drop.
    Drop(1);
  }
  ast_context()->ReturnInstruction(call, expression->id());
}


// Emits an inlined Array.prototype.indexOf / lastIndexOf scan over a fast
// elements backing store.  The result slot starts out as -1 on the stack and
// is overwritten with the index when a match breaks out of the loop.
HValue* HOptimizedGraphBuilder::BuildArrayIndexOf(HValue* receiver,
                                                  HValue* search_element,
                                                  ElementsKind kind,
                                                  ArrayIndexOfMode mode) {
  ASSERT(IsFastElementsKind(kind));

  NoObservableSideEffectsScope no_effects(this);

  HValue* elements = AddLoadElements(receiver);
  HValue* length = AddLoadArrayLength(receiver, kind);

  // indexOf walks 0 -> length, lastIndexOf walks length -> 0.
  HValue* initial;
  HValue* terminating;
  Token::Value token;
  LoopBuilder::Direction direction;
  if (mode == kFirstIndexOf) {
    initial = graph()->GetConstant0();
    terminating = length;
    token = Token::LT;
    direction = LoopBuilder::kPostIncrement;
  } else {
    ASSERT_EQ(kLastIndexOf, mode);
    initial = length;
    terminating = graph()->GetConstant0();
    token = Token::GT;
    direction = LoopBuilder::kPreDecrement;
  }

  // Default result if nothing matches.
  Push(graph()->GetConstantMinus1());
  if (IsFastDoubleElementsKind(kind) || IsFastSmiElementsKind(kind)) {
    // Smi/double arrays: a single typed comparison per element suffices.
    LoopBuilder loop(this, context(), direction);
    {
      HValue* index = loop.BeginBody(initial, terminating, token);
      HValue* element =
          AddUncasted<HLoadKeyed>(
              elements, index, static_cast<HValue*>(NULL),
              kind, ALLOW_RETURN_HOLE);
      IfBuilder if_issame(this);
      if (IsFastDoubleElementsKind(kind)) {
        if_issame.If<HCompareNumericAndBranch>(
            element, search_element, Token::EQ_STRICT);
      } else {
        if_issame.If<HCompareObjectEqAndBranch>(element, search_element);
      }
      if_issame.Then();
      {
        // Replace the -1 placeholder with the matching index and stop.
        Drop(1);
        Push(index);
        loop.Break();
      }
      if_issame.End();
    }
    loop.EndBody();
  } else {
    // Object arrays: dispatch on the type of the search element so each loop
    // body only performs the one comparison that can actually match.
    IfBuilder if_isstring(this);
    if_isstring.If<HIsStringAndBranch>(search_element);
    if_isstring.Then();
    {
      // String search element: compare only against string elements.
      LoopBuilder loop(this, context(), direction);
      {
        HValue* index = loop.BeginBody(initial, terminating, token);
        HValue* element = AddUncasted<HLoadKeyed>(
            elements, index, static_cast<HValue*>(NULL),
            kind, ALLOW_RETURN_HOLE);
        IfBuilder if_issame(this);
        if_issame.If<HIsStringAndBranch>(element);
        if_issame.AndIf<HStringCompareAndBranch>(
            element, search_element, Token::EQ_STRICT);
        if_issame.Then();
        {
          Drop(1);
          Push(index);
          loop.Break();
        }
        if_issame.End();
      }
      loop.EndBody();
    }
    if_isstring.Else();
    {
      IfBuilder if_isnumber(this);
      if_isnumber.If<HIsSmiAndBranch>(search_element);
      if_isnumber.OrIf<HCompareMap>(
          search_element, isolate()->factory()->heap_number_map());
      if_isnumber.Then();
      {
        // Numeric search element: compare as doubles against numeric
        // elements only.
        HValue* search_number =
            AddUncasted<HForceRepresentation>(search_element,
                                              Representation::Double());
        LoopBuilder loop(this, context(), direction);
        {
          HValue* index = loop.BeginBody(initial, terminating, token);
          HValue* element = AddUncasted<HLoadKeyed>(
              elements, index, static_cast<HValue*>(NULL),
              kind, ALLOW_RETURN_HOLE);

          IfBuilder if_element_isnumber(this);
          if_element_isnumber.If<HIsSmiAndBranch>(element);
          if_element_isnumber.OrIf<HCompareMap>(
              element,
              isolate()->factory()->heap_number_map());
          if_element_isnumber.Then();
          {
            HValue* number =
                AddUncasted<HForceRepresentation>(element,
                                                  Representation::Double());
            IfBuilder if_issame(this);
            if_issame.If<HCompareNumericAndBranch>(
                number, search_number, Token::EQ_STRICT);
            if_issame.Then();
            {
              // Found it: swap the -1 placeholder for the index.
              Drop(1);
              Push(index);
              loop.Break();
            }
            if_issame.End();
          }
          if_element_isnumber.End();
        }
        loop.EndBody();
      }
      if_isnumber.Else();
      {
        // Any other search element: only reference equality can match.
        LoopBuilder loop(this, context(), direction);
        {
          HValue* index = loop.BeginBody(initial, terminating, token);
          HValue* element = AddUncasted<HLoadKeyed>(
              elements, index, static_cast<HValue*>(NULL),
              kind, ALLOW_RETURN_HOLE);
          IfBuilder if_issame(this);
          if_issame.If<HCompareObjectEqAndBranch>(
              element, search_element);
          if_issame.Then();
          {
            Drop(1);
            Push(index);
            loop.Break();
          }
          if_issame.End();
        }
        loop.EndBody();
      }
      if_isnumber.End();
    }
    if_isstring.End();
  }

  // Either the matching index or the initial -1.
  return Pop();
}


// Handles a plain call to the Array function with allocation-site feedback;
// returns false when the callee is not Array or no site was recorded.
bool HOptimizedGraphBuilder::TryHandleArrayCall(Call* expr, HValue* function) {
  if (!array_function().is_identical_to(expr->target())) {
    return false;
  }

  Handle<AllocationSite> site = expr->allocation_site();
  if (site.is_null()) return false;

  BuildArrayCall(expr,
                 expr->arguments()->length(),
                 function,
                 site);
  return true;
}


// Same as TryHandleArrayCall but for `new Array(...)`; CallNew always
// carries an allocation site.
bool HOptimizedGraphBuilder::TryHandleArrayCallNew(CallNew* expr,
                                                   HValue* function) {
  if (!array_function().is_identical_to(expr->target())) {
    return false;
  }

  BuildArrayCall(expr,
                 expr->arguments()->length(),
                 function,
                 expr->allocation_site());
  return true;
}


// Compiles a Call expression, trying (in order) the specialized fast paths —
// f.apply, inlined builtins, API calls, Array calls, general inlining —
// before falling back to generic call instructions.
void HOptimizedGraphBuilder::VisitCall(Call* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Expression* callee = expr->expression();
  int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
  HInstruction* call = NULL;

  Property* prop = callee->AsProperty();
  if (prop != NULL) {
    // Method call: obj.f(...) or obj[k](...).
    CHECK_ALIVE(VisitForValue(prop->obj()));
    HValue* receiver = Top();

    SmallMapList* types;
    ComputeReceiverTypes(expr, receiver, &types, zone());

    if (prop->key()->IsPropertyName() && types->length() > 0) {
      Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
      PropertyAccessInfo info(this, LOAD, ToType(types->first()), name);
      if (!info.CanAccessAsMonomorphic(types)) {
        // Several distinct handlers are needed; emit a polymorphic call.
        HandlePolymorphicCallNamed(expr, receiver, types, name);
        return;
      }
    }

    HValue* key = NULL;
    if (!prop->key()->IsPropertyName()) {
      CHECK_ALIVE(VisitForValue(prop->key()));
      key = Pop();
    }

    CHECK_ALIVE(PushLoad(prop, receiver, key));
    HValue* function = Pop();

    if (FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());

    // Push the function under the receiver.
    environment()->SetExpressionStackAt(0, function);

    Push(receiver);

    if (function->IsConstant() &&
        HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
      // Statically known callee: try the specialized method fast paths.
      Handle<JSFunction> known_function = Handle<JSFunction>::cast(
          HConstant::cast(function)->handle(isolate()));
      expr->set_target(known_function);

      if (TryCallApply(expr)) return;
      CHECK_ALIVE(VisitExpressions(expr->arguments()));

      Handle<Map> map = types->length() == 1 ? types->first() : Handle<Map>();
      if (TryInlineBuiltinMethodCall(expr, receiver, map)) {
        if (FLAG_trace_inlining) {
          PrintF("Inlining builtin ");
          known_function->ShortPrint();
          PrintF("\n");
        }
        return;
      }
      if (TryInlineApiMethodCall(expr, receiver, types)) return;

      // Wrap the receiver if necessary.
      if (NeedsWrappingFor(ToType(types->first()), known_function)) {
        // Since HWrapReceiver currently cannot actually wrap numbers and
        // strings, use the regular CallFunctionStub for method calls to wrap
        // the receiver.
        // TODO(verwaest): Support creation of value wrappers directly in
        // HWrapReceiver.
        call = New<HCallFunction>(
            function, argument_count, WRAP_AND_CALL);
      } else if (TryInlineCall(expr)) {
        return;
      } else {
        call = BuildCallConstantFunction(known_function, argument_count);
      }

    } else {
      // Unknown callee: generic call; wrap the receiver unless it is
      // already known to be a JSObject.
      CHECK_ALIVE(VisitExpressions(expr->arguments()));
      CallFunctionFlags flags = receiver->type().IsJSObject()
          ? NO_CALL_FUNCTION_FLAGS : CALL_AS_METHOD;
      call = New<HCallFunction>(function, argument_count, flags);
    }
    PushArgumentsFromEnvironment(argument_count);

  } else {
    // Non-property callee: f(...), possibly a global or eval-like call.
    VariableProxy* proxy = expr->expression()->AsVariableProxy();
    if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
      return Bailout(kPossibleDirectCallToEval);
    }

    // The function is on the stack in the unoptimized code during
    // evaluation of the arguments.
    CHECK_ALIVE(VisitForValue(expr->expression()));
    HValue* function = Top();
    if (expr->global_call()) {
      Variable* var = proxy->var();
      bool known_global_function = false;
      // If there is a global property cell for the name at compile time and
      // access check is not enabled we assume that the function will not change
      // and generate optimized code for calling the function.
      LookupResult lookup(isolate());
      GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, LOAD);
      if (type == kUseCell &&
          !current_info()->global_object()->IsAccessCheckNeeded()) {
        Handle<GlobalObject> global(current_info()->global_object());
        known_global_function = expr->ComputeGlobalTarget(global, &lookup);
      }
      if (known_global_function) {
        Add<HCheckValue>(function, expr->target());

        // Placeholder for the receiver.
        Push(graph()->GetConstantUndefined());
        CHECK_ALIVE(VisitExpressions(expr->arguments()));

        // Patch the global object on the stack by the expected receiver.
        HValue* receiver = ImplicitReceiverFor(function, expr->target());
        const int receiver_index = argument_count - 1;
        environment()->SetExpressionStackAt(receiver_index, receiver);

        if (TryInlineBuiltinFunctionCall(expr)) {
          if (FLAG_trace_inlining) {
            PrintF("Inlining builtin ");
            expr->target()->ShortPrint();
            PrintF("\n");
          }
          return;
        }
        if (TryInlineApiFunctionCall(expr, receiver)) return;
        if (TryHandleArrayCall(expr, function)) return;
        if (TryInlineCall(expr)) return;

        PushArgumentsFromEnvironment(argument_count);
        call = BuildCallConstantFunction(expr->target(), argument_count);
      } else {
        // Unknown global target: generic call with undefined receiver.
        Push(graph()->GetConstantUndefined());
        CHECK_ALIVE(VisitExpressions(expr->arguments()));
        PushArgumentsFromEnvironment(argument_count);
        call = New<HCallFunction>(function, argument_count);
      }

    } else if (expr->IsMonomorphic()) {
      // Non-global but monomorphic: pin the target and try the fast paths.
      Add<HCheckValue>(function, expr->target());

      Push(graph()->GetConstantUndefined());
      CHECK_ALIVE(VisitExpressions(expr->arguments()));

      HValue* receiver = ImplicitReceiverFor(function, expr->target());
      const int receiver_index = argument_count - 1;
      environment()->SetExpressionStackAt(receiver_index, receiver);

      if (TryInlineBuiltinFunctionCall(expr)) {
        if (FLAG_trace_inlining) {
          PrintF("Inlining builtin ");
          expr->target()->ShortPrint();
          PrintF("\n");
        }
        return;
      }
      if (TryInlineApiFunctionCall(expr, receiver)) return;

      if (TryInlineCall(expr)) return;

      call = PreProcessCall(New<HInvokeFunction>(
          function, expr->target(), argument_count));

    } else {
      // Completely generic call.
      Push(graph()->GetConstantUndefined());
      CHECK_ALIVE(VisitExpressions(expr->arguments()));
      PushArgumentsFromEnvironment(argument_count);
      call = New<HCallFunction>(function, argument_count);
    }
  }

  Drop(1);  // Drop the function.
  return ast_context()->ReturnInstruction(call, expr->id());
}


// Emits a fully inlined array allocation for a call to the Array function
// with zero or one argument, using the allocation site's elements-kind
// feedback.
void HOptimizedGraphBuilder::BuildInlinedCallArray(
    Expression* expression,
    int argument_count,
    Handle<AllocationSite> site) {
  ASSERT(!site.is_null());
  ASSERT(argument_count >= 0 && argument_count <= 1);
  NoObservableSideEffectsScope no_effects(this);

  // We should at least have the constructor on the expression stack.
  HValue* constructor = environment()->ExpressionStackAt(argument_count);

  // Register on the site for deoptimization if the transition feedback changes.
  AllocationSite::AddDependentCompilationInfo(
      site, AllocationSite::TRANSITIONS, top_info());
  ElementsKind kind = site->GetElementsKind();
  HInstruction* site_instruction = Add<HConstant>(site);

  // In the single constant argument case, we may have to adjust elements kind
  // to avoid creating a packed non-empty array.
  if (argument_count == 1 && !IsHoleyElementsKind(kind)) {
    HValue* argument = environment()->Top();
    if (argument->IsConstant()) {
      // `Array(n)` with constant n != 0 creates n holes, so the elements
      // kind must be the holey variant.
      HConstant* constant_argument = HConstant::cast(argument);
      ASSERT(constant_argument->HasSmiValue());
      int constant_array_size = constant_argument->Integer32Value();
      if (constant_array_size != 0) {
        kind = GetHoleyElementsKind(kind);
      }
    }
  }

  // Build the array.
  JSArrayBuilder array_builder(this,
                               kind,
                               site_instruction,
                               constructor,
                               DISABLE_ALLOCATION_SITES);
  HValue* new_object = argument_count == 0
      ? array_builder.AllocateEmptyArray()
      : BuildAllocateArrayFromLength(&array_builder, Top());

  // Drop the argument(s), the function, and — for a plain call — the
  // receiver slot as well.
  int args_to_drop = argument_count + (expression->IsCall() ? 2 : 1);
  Drop(args_to_drop);
  ast_context()->ReturnValue(new_object);
}


// Checks whether allocation using the given constructor can be inlined.
static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
  return constructor->has_initial_map() &&
      constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
      constructor->initial_map()->instance_size() < HAllocate::kMaxInlineSize &&
      constructor->initial_map()->InitialPropertiesLength() == 0;
}


// Decides whether a call to the Array function can be compiled as an inlined
// allocation, tracing the reason whenever inlining is rejected.
bool HOptimizedGraphBuilder::IsCallArrayInlineable(
    int argument_count,
    Handle<AllocationSite> site) {
  Handle<JSFunction> caller = current_info()->closure();
  Handle<JSFunction> target = array_function();
  // We should have the function plus array arguments on the environment stack.
  ASSERT(environment()->length() >= (argument_count + 1));
  ASSERT(!site.is_null());

  bool inline_ok = false;
  if (site->CanInlineCall()) {
    // We also want to avoid inlining in certain 1 argument scenarios.
    if (argument_count == 1) {
      HValue* argument = Top();
      if (argument->IsConstant()) {
        // Do not inline if the constant length argument is not a smi or
        // outside the valid range for unrolled loop initialization.
        HConstant* constant_argument = HConstant::cast(argument);
        if (constant_argument->HasSmiValue()) {
          int value = constant_argument->Integer32Value();
          inline_ok = value >= 0 && value <= kElementLoopUnrollThreshold;
          if (!inline_ok) {
            TraceInline(target, caller,
                        "Constant length outside of valid inlining range.");
          }
        }
      } else {
        TraceInline(target, caller,
                    "Dont inline [new] Array(n) where n isn't constant.");
      }
    } else if (argument_count == 0) {
      inline_ok = true;
    } else {
      TraceInline(target, caller, "Too many arguments to inline.");
    }
  } else {
    TraceInline(target, caller, "AllocationSite requested no inlining.");
  }

  if (inline_ok) {
    TraceInline(target, caller, NULL);
  }
  return inline_ok;
}


// Compiles a CallNew (`new f(...)`) expression: inlined allocation plus
// (possibly) inlined constructor body for monomorphic inlineable targets,
// otherwise the Array fast path or a generic HCallNew.
void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
  int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
  Factory* factory = isolate()->factory();

  // The constructor function is on the stack in the unoptimized code
  // during evaluation of the arguments.
  CHECK_ALIVE(VisitForValue(expr->expression()));
  HValue* function = Top();
  CHECK_ALIVE(VisitExpressions(expr->arguments()));

  if (FLAG_inline_construct &&
      expr->IsMonomorphic() &&
      IsAllocationInlineable(expr->target())) {
    Handle<JSFunction> constructor = expr->target();
    // Deoptimize if a different function shows up at runtime.
    HValue* check = Add<HCheckValue>(function, constructor);

    // Force completion of inobject slack tracking before generating
    // allocation code to finalize instance size.
    if (constructor->IsInobjectSlackTrackingInProgress()) {
      constructor->CompleteInobjectSlackTracking();
    }

    // Calculate instance size from initial map of constructor.
    ASSERT(constructor->has_initial_map());
    Handle<Map> initial_map(constructor->initial_map());
    int instance_size = initial_map->instance_size();
    ASSERT(initial_map->InitialPropertiesLength() == 0);

    // Allocate an instance of the implicit receiver object.
    HValue* size_in_bytes = Add<HConstant>(instance_size);
    HAllocationMode allocation_mode;
    if (FLAG_pretenuring_call_new) {
      if (FLAG_allocation_site_pretenuring) {
        // Try to use pretenuring feedback.
        Handle<AllocationSite> allocation_site = expr->allocation_site();
        allocation_mode = HAllocationMode(allocation_site);
        // Take a dependency on allocation site.
        AllocationSite::AddDependentCompilationInfo(allocation_site,
                                                    AllocationSite::TENURING,
                                                    top_info());
      }
    }

    HAllocate* receiver = BuildAllocate(
        size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE, allocation_mode);
    receiver->set_known_initial_map(initial_map);

    // Initialize map and fields of the newly allocated object.
    { NoObservableSideEffectsScope no_effects(this);
      ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map, JSObject::kMapOffset),
          Add<HConstant>(initial_map));
      HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map,
                                         JSObject::kPropertiesOffset),
          empty_fixed_array);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map,
                                         JSObject::kElementsOffset),
          empty_fixed_array);
      // Pre-fill all in-object properties with undefined.
      if (initial_map->inobject_properties() != 0) {
        HConstant* undefined = graph()->GetConstantUndefined();
        for (int i = 0; i < initial_map->inobject_properties(); i++) {
          int property_offset = initial_map->GetInObjectPropertyOffset(i);
          Add<HStoreNamedField>(receiver,
              HObjectAccess::ForMapAndOffset(initial_map, property_offset),
              undefined);
        }
      }
    }

    // Replace the constructor function with a newly allocated receiver using
    // the index of the receiver from the top of the expression stack.
    const int receiver_index = argument_count - 1;
    ASSERT(environment()->ExpressionStackAt(receiver_index) == function);
    environment()->SetExpressionStackAt(receiver_index, receiver);

    if (TryInlineConstruct(expr, receiver)) {
      // Inlining worked, add a dependency on the initial map to make sure that
      // this code is deoptimized whenever the initial map of the constructor
      // changes.
      Map::AddDependentCompilationInfo(
          initial_map, DependentCode::kInitialMapChangedGroup, top_info());
      return;
    }

    // TODO(mstarzinger): For now we remove the previous HAllocate and all
    // corresponding instructions and instead add HPushArguments for the
    // arguments in case inlining failed.  What we actually should do is for
    // inlining to try to build a subgraph without mutating the parent graph.
    HInstruction* instr = current_block()->last();
    do {
      HInstruction* prev_instr = instr->previous();
      instr->DeleteAndReplaceWith(NULL);
      instr = prev_instr;
    } while (instr != check);
    environment()->SetExpressionStackAt(receiver_index, function);
    HInstruction* call =
        PreProcessCall(New<HCallNew>(function, argument_count));
    return ast_context()->ReturnInstruction(call, expr->id());
  } else {
    // The constructor function is both an operand to the instruction and an
    // argument to the construct call.
    if (TryHandleArrayCallNew(expr, function)) return;

    HInstruction* call =
        PreProcessCall(New<HCallNew>(function, argument_count));
    return ast_context()->ReturnInstruction(call, expr->id());
  }
}


// Support for generating inlined runtime functions.

// Lookup table for generators for runtime calls that are generated inline.
// Elements of the table are member pointers to functions of
// HOptimizedGraphBuilder.
// Expands each INLINE_FUNCTION_LIST entry into a pointer to the
// corresponding Generate<Name> member function.
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)        \
    &HOptimizedGraphBuilder::Generate##Name,

const HOptimizedGraphBuilder::InlineFunctionGenerator
    HOptimizedGraphBuilder::kInlineFunctionGenerators[] = {
        INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
        INLINE_OPTIMIZED_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
};
#undef INLINE_FUNCTION_GENERATOR_ADDRESS


// Initializes the common JSArrayBufferView fields of a freshly allocated
// view object (DataView or TypedArray): internal fields are zeroed, byte
// offset/length are stored, and the view is linked into the buffer's weak
// view list.  A NULL |buffer| stores 0 / undefined placeholders instead.
template <class ViewClass>
void HGraphBuilder::BuildArrayBufferViewInitialization(
    HValue* obj,
    HValue* buffer,
    HValue* byte_offset,
    HValue* byte_length) {

  // Zero out the view's embedder internal fields.
  for (int offset = ViewClass::kSize;
       offset < ViewClass::kSizeWithInternalFields;
       offset += kPointerSize) {
    Add<HStoreNamedField>(obj,
        HObjectAccess::ForObservableJSObjectOffset(offset),
        graph()->GetConstant0());
  }

  Add<HStoreNamedField>(
      obj,
      HObjectAccess::ForJSArrayBufferViewByteOffset(),
      byte_offset);
  Add<HStoreNamedField>(
      obj,
      HObjectAccess::ForJSArrayBufferViewByteLength(),
      byte_length);

  if (buffer != NULL) {
    Add<HStoreNamedField>(
        obj,
        HObjectAccess::ForJSArrayBufferViewBuffer(), buffer);
    // Prepend the view to the buffer's weak-first-view list.
    HObjectAccess weak_first_view_access =
        HObjectAccess::ForJSArrayBufferWeakFirstView();
    Add<HStoreNamedField>(obj,
        HObjectAccess::ForJSArrayBufferViewWeakNext(),
        Add<HLoadNamedField>(buffer,
                             static_cast<HValue*>(NULL),
                             weak_first_view_access));
    Add<HStoreNamedField>(buffer, weak_first_view_access, obj);
  } else {
    Add<HStoreNamedField>(
        obj,
        HObjectAccess::ForJSArrayBufferViewBuffer(),
        Add<HConstant>(static_cast<int32_t>(0)));
    Add<HStoreNamedField>(obj,
        HObjectAccess::ForJSArrayBufferViewWeakNext(),
        graph()->GetConstantUndefined());
  }
}


// Inline code generator for the %DataViewInitialize runtime call:
// (object, buffer, byteOffset, byteLength).
void HOptimizedGraphBuilder::GenerateDataViewInitialize(
    CallRuntime* expr) {
  ZoneList<Expression*>* arguments
      = expr->arguments();

  ASSERT(arguments->length()== 4);
  CHECK_ALIVE(VisitForValue(arguments->at(0)));
  HValue* obj = Pop();

  CHECK_ALIVE(VisitForValue(arguments->at(1)));
  HValue* buffer = Pop();

  CHECK_ALIVE(VisitForValue(arguments->at(2)));
  HValue* byte_offset = Pop();

  CHECK_ALIVE(VisitForValue(arguments->at(3)));
  HValue* byte_length = Pop();

  {
    NoObservableSideEffectsScope scope(this);
    BuildArrayBufferViewInitialization<JSDataView>(
        obj, buffer, byte_offset, byte_length);
  }
}


// Returns the initial map of the typed-array constructor for |array_type|,
// transitioned to |target_kind|.
static Handle<Map> TypedArrayMap(Isolate* isolate,
                                 ExternalArrayType array_type,
                                 ElementsKind target_kind) {
  Handle<Context> native_context = isolate->native_context();
  Handle<JSFunction> fun;
  switch (array_type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
    case kExternal##Type##Array:                                              \
      fun = Handle<JSFunction>(native_context->type##_array_fun());           \
      break;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
  }
  Handle<Map> map(fun->initial_map());
  return Map::AsElementsKind(map, target_kind);
}


// Allocates an ExternalArray elements object for a typed array whose data
// lives in an externally allocated ArrayBuffer backing store; the element
// pointer is backing_store (+ byte_offset when non-zero).
HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements(
    ExternalArrayType array_type,
    bool is_zero_byte_offset,
    HValue* buffer, HValue* byte_offset, HValue* length) {
  Handle<Map> external_array_map(
      isolate()->heap()->MapForExternalArrayType(array_type));

  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
9341 length = AddUncasted<HForceRepresentation>(length, Representation::Smi()); 9342 HValue* elements = 9343 Add<HAllocate>( 9344 Add<HConstant>(ExternalArray::kAlignedSize), 9345 HType::HeapObject(), 9346 NOT_TENURED, 9347 external_array_map->instance_type()); 9348 9349 AddStoreMapConstant(elements, external_array_map); 9350 Add<HStoreNamedField>(elements, 9351 HObjectAccess::ForFixedArrayLength(), length); 9352 9353 HValue* backing_store = Add<HLoadNamedField>( 9354 buffer, static_cast<HValue*>(NULL), 9355 HObjectAccess::ForJSArrayBufferBackingStore()); 9356 9357 HValue* typed_array_start; 9358 if (is_zero_byte_offset) { 9359 typed_array_start = backing_store; 9360 } else { 9361 HInstruction* external_pointer = 9362 AddUncasted<HAdd>(backing_store, byte_offset); 9363 // Arguments are checked prior to call to TypedArrayInitialize, 9364 // including byte_offset. 9365 external_pointer->ClearFlag(HValue::kCanOverflow); 9366 typed_array_start = external_pointer; 9367 } 9368 9369 Add<HStoreNamedField>(elements, 9370 HObjectAccess::ForExternalArrayExternalPointer(), 9371 typed_array_start); 9372 9373 return elements; 9374 } 9375 9376 9377 HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray( 9378 ExternalArrayType array_type, size_t element_size, 9379 ElementsKind fixed_elements_kind, 9380 HValue* byte_length, HValue* length) { 9381 STATIC_ASSERT( 9382 (FixedTypedArrayBase::kHeaderSize & kObjectAlignmentMask) == 0); 9383 HValue* total_size; 9384 9385 // if fixed array's elements are not aligned to object's alignment, 9386 // we need to align the whole array to object alignment. 
9387 if (element_size % kObjectAlignment != 0) { 9388 total_size = BuildObjectSizeAlignment( 9389 byte_length, FixedTypedArrayBase::kHeaderSize); 9390 } else { 9391 total_size = AddUncasted<HAdd>(byte_length, 9392 Add<HConstant>(FixedTypedArrayBase::kHeaderSize)); 9393 total_size->ClearFlag(HValue::kCanOverflow); 9394 } 9395 9396 // The HForceRepresentation is to prevent possible deopt on int-smi 9397 // conversion after allocation but before the new object fields are set. 9398 length = AddUncasted<HForceRepresentation>(length, Representation::Smi()); 9399 Handle<Map> fixed_typed_array_map( 9400 isolate()->heap()->MapForFixedTypedArray(array_type)); 9401 HValue* elements = 9402 Add<HAllocate>(total_size, HType::HeapObject(), 9403 NOT_TENURED, fixed_typed_array_map->instance_type()); 9404 AddStoreMapConstant(elements, fixed_typed_array_map); 9405 9406 Add<HStoreNamedField>(elements, 9407 HObjectAccess::ForFixedArrayLength(), 9408 length); 9409 9410 HValue* filler = Add<HConstant>(static_cast<int32_t>(0)); 9411 9412 { 9413 LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); 9414 9415 HValue* key = builder.BeginBody( 9416 Add<HConstant>(static_cast<int32_t>(0)), 9417 length, Token::LT); 9418 Add<HStoreKeyed>(elements, key, filler, fixed_elements_kind); 9419 9420 builder.EndBody(); 9421 } 9422 return elements; 9423 } 9424 9425 9426 void HOptimizedGraphBuilder::GenerateTypedArrayInitialize( 9427 CallRuntime* expr) { 9428 ZoneList<Expression*>* arguments = expr->arguments(); 9429 9430 static const int kObjectArg = 0; 9431 static const int kArrayIdArg = 1; 9432 static const int kBufferArg = 2; 9433 static const int kByteOffsetArg = 3; 9434 static const int kByteLengthArg = 4; 9435 static const int kArgsLength = 5; 9436 ASSERT(arguments->length() == kArgsLength); 9437 9438 9439 CHECK_ALIVE(VisitForValue(arguments->at(kObjectArg))); 9440 HValue* obj = Pop(); 9441 9442 if (arguments->at(kArrayIdArg)->IsLiteral()) { 9443 // This should never happen in real use, 
but can happen when fuzzing. 9444 // Just bail out. 9445 Bailout(kNeedSmiLiteral); 9446 return; 9447 } 9448 Handle<Object> value = 9449 static_cast<Literal*>(arguments->at(kArrayIdArg))->value(); 9450 if (!value->IsSmi()) { 9451 // This should never happen in real use, but can happen when fuzzing. 9452 // Just bail out. 9453 Bailout(kNeedSmiLiteral); 9454 return; 9455 } 9456 int array_id = Smi::cast(*value)->value(); 9457 9458 HValue* buffer; 9459 if (!arguments->at(kBufferArg)->IsNullLiteral()) { 9460 CHECK_ALIVE(VisitForValue(arguments->at(kBufferArg))); 9461 buffer = Pop(); 9462 } else { 9463 buffer = NULL; 9464 } 9465 9466 HValue* byte_offset; 9467 bool is_zero_byte_offset; 9468 9469 if (arguments->at(kByteOffsetArg)->IsLiteral() 9470 && Smi::FromInt(0) == 9471 *static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) { 9472 byte_offset = Add<HConstant>(static_cast<int32_t>(0)); 9473 is_zero_byte_offset = true; 9474 } else { 9475 CHECK_ALIVE(VisitForValue(arguments->at(kByteOffsetArg))); 9476 byte_offset = Pop(); 9477 is_zero_byte_offset = false; 9478 ASSERT(buffer != NULL); 9479 } 9480 9481 CHECK_ALIVE(VisitForValue(arguments->at(kByteLengthArg))); 9482 HValue* byte_length = Pop(); 9483 9484 NoObservableSideEffectsScope scope(this); 9485 IfBuilder byte_offset_smi(this); 9486 9487 if (!is_zero_byte_offset) { 9488 byte_offset_smi.If<HIsSmiAndBranch>(byte_offset); 9489 byte_offset_smi.Then(); 9490 } 9491 9492 ExternalArrayType array_type = 9493 kExternalInt8Array; // Bogus initialization. 9494 size_t element_size = 1; // Bogus initialization. 9495 ElementsKind external_elements_kind = // Bogus initialization. 9496 EXTERNAL_INT8_ELEMENTS; 9497 ElementsKind fixed_elements_kind = // Bogus initialization. 9498 INT8_ELEMENTS; 9499 Runtime::ArrayIdToTypeAndSize(array_id, 9500 &array_type, 9501 &external_elements_kind, 9502 &fixed_elements_kind, 9503 &element_size); 9504 9505 9506 { // byte_offset is Smi. 
9507 BuildArrayBufferViewInitialization<JSTypedArray>( 9508 obj, buffer, byte_offset, byte_length); 9509 9510 9511 HInstruction* length = AddUncasted<HDiv>(byte_length, 9512 Add<HConstant>(static_cast<int32_t>(element_size))); 9513 9514 Add<HStoreNamedField>(obj, 9515 HObjectAccess::ForJSTypedArrayLength(), 9516 length); 9517 9518 HValue* elements; 9519 if (buffer != NULL) { 9520 elements = BuildAllocateExternalElements( 9521 array_type, is_zero_byte_offset, buffer, byte_offset, length); 9522 Handle<Map> obj_map = TypedArrayMap( 9523 isolate(), array_type, external_elements_kind); 9524 AddStoreMapConstant(obj, obj_map); 9525 } else { 9526 ASSERT(is_zero_byte_offset); 9527 elements = BuildAllocateFixedTypedArray( 9528 array_type, element_size, fixed_elements_kind, 9529 byte_length, length); 9530 } 9531 Add<HStoreNamedField>( 9532 obj, HObjectAccess::ForElementsPointer(), elements); 9533 } 9534 9535 if (!is_zero_byte_offset) { 9536 byte_offset_smi.Else(); 9537 { // byte_offset is not Smi. 
9538 Push(obj); 9539 CHECK_ALIVE(VisitForValue(arguments->at(kArrayIdArg))); 9540 Push(buffer); 9541 Push(byte_offset); 9542 Push(byte_length); 9543 PushArgumentsFromEnvironment(kArgsLength); 9544 Add<HCallRuntime>(expr->name(), expr->function(), kArgsLength); 9545 } 9546 } 9547 byte_offset_smi.End(); 9548 } 9549 9550 9551 void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) { 9552 ASSERT(expr->arguments()->length() == 0); 9553 HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue)); 9554 return ast_context()->ReturnInstruction(max_smi, expr->id()); 9555 } 9556 9557 9558 void HOptimizedGraphBuilder::GenerateTypedArrayMaxSizeInHeap( 9559 CallRuntime* expr) { 9560 ASSERT(expr->arguments()->length() == 0); 9561 HConstant* result = New<HConstant>(static_cast<int32_t>( 9562 FLAG_typed_array_max_size_in_heap)); 9563 return ast_context()->ReturnInstruction(result, expr->id()); 9564 } 9565 9566 9567 void HOptimizedGraphBuilder::GenerateArrayBufferGetByteLength( 9568 CallRuntime* expr) { 9569 ASSERT(expr->arguments()->length() == 1); 9570 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0))); 9571 HValue* buffer = Pop(); 9572 HInstruction* result = New<HLoadNamedField>( 9573 buffer, 9574 static_cast<HValue*>(NULL), 9575 HObjectAccess::ForJSArrayBufferByteLength()); 9576 return ast_context()->ReturnInstruction(result, expr->id()); 9577 } 9578 9579 9580 void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteLength( 9581 CallRuntime* expr) { 9582 ASSERT(expr->arguments()->length() == 1); 9583 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0))); 9584 HValue* buffer = Pop(); 9585 HInstruction* result = New<HLoadNamedField>( 9586 buffer, 9587 static_cast<HValue*>(NULL), 9588 HObjectAccess::ForJSArrayBufferViewByteLength()); 9589 return ast_context()->ReturnInstruction(result, expr->id()); 9590 } 9591 9592 9593 void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteOffset( 9594 CallRuntime* expr) { 9595 ASSERT(expr->arguments()->length() == 1); 
9596 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0))); 9597 HValue* buffer = Pop(); 9598 HInstruction* result = New<HLoadNamedField>( 9599 buffer, 9600 static_cast<HValue*>(NULL), 9601 HObjectAccess::ForJSArrayBufferViewByteOffset()); 9602 return ast_context()->ReturnInstruction(result, expr->id()); 9603 } 9604 9605 9606 void HOptimizedGraphBuilder::GenerateTypedArrayGetLength( 9607 CallRuntime* expr) { 9608 ASSERT(expr->arguments()->length() == 1); 9609 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0))); 9610 HValue* buffer = Pop(); 9611 HInstruction* result = New<HLoadNamedField>( 9612 buffer, 9613 static_cast<HValue*>(NULL), 9614 HObjectAccess::ForJSTypedArrayLength()); 9615 return ast_context()->ReturnInstruction(result, expr->id()); 9616 } 9617 9618 9619 void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) { 9620 ASSERT(!HasStackOverflow()); 9621 ASSERT(current_block() != NULL); 9622 ASSERT(current_block()->HasPredecessor()); 9623 if (expr->is_jsruntime()) { 9624 return Bailout(kCallToAJavaScriptRuntimeFunction); 9625 } 9626 9627 const Runtime::Function* function = expr->function(); 9628 ASSERT(function != NULL); 9629 9630 if (function->intrinsic_type == Runtime::INLINE || 9631 function->intrinsic_type == Runtime::INLINE_OPTIMIZED) { 9632 ASSERT(expr->name()->length() > 0); 9633 ASSERT(expr->name()->Get(0) == '_'); 9634 // Call to an inline function. 9635 int lookup_index = static_cast<int>(function->function_id) - 9636 static_cast<int>(Runtime::kFirstInlineFunction); 9637 ASSERT(lookup_index >= 0); 9638 ASSERT(static_cast<size_t>(lookup_index) < 9639 ARRAY_SIZE(kInlineFunctionGenerators)); 9640 InlineFunctionGenerator generator = kInlineFunctionGenerators[lookup_index]; 9641 9642 // Call the inline code generator using the pointer-to-member. 
9643 (this->*generator)(expr); 9644 } else { 9645 ASSERT(function->intrinsic_type == Runtime::RUNTIME); 9646 Handle<String> name = expr->name(); 9647 int argument_count = expr->arguments()->length(); 9648 CHECK_ALIVE(VisitExpressions(expr->arguments())); 9649 PushArgumentsFromEnvironment(argument_count); 9650 HCallRuntime* call = New<HCallRuntime>(name, function, 9651 argument_count); 9652 return ast_context()->ReturnInstruction(call, expr->id()); 9653 } 9654 } 9655 9656 9657 void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) { 9658 ASSERT(!HasStackOverflow()); 9659 ASSERT(current_block() != NULL); 9660 ASSERT(current_block()->HasPredecessor()); 9661 switch (expr->op()) { 9662 case Token::DELETE: return VisitDelete(expr); 9663 case Token::VOID: return VisitVoid(expr); 9664 case Token::TYPEOF: return VisitTypeof(expr); 9665 case Token::NOT: return VisitNot(expr); 9666 default: UNREACHABLE(); 9667 } 9668 } 9669 9670 9671 void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) { 9672 Property* prop = expr->expression()->AsProperty(); 9673 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 9674 if (prop != NULL) { 9675 CHECK_ALIVE(VisitForValue(prop->obj())); 9676 CHECK_ALIVE(VisitForValue(prop->key())); 9677 HValue* key = Pop(); 9678 HValue* obj = Pop(); 9679 HValue* function = AddLoadJSBuiltin(Builtins::DELETE); 9680 Add<HPushArguments>(obj, key, Add<HConstant>(function_strict_mode())); 9681 // TODO(olivf) InvokeFunction produces a check for the parameter count, 9682 // even though we are certain to pass the correct number of arguments here. 9683 HInstruction* instr = New<HInvokeFunction>(function, 3); 9684 return ast_context()->ReturnInstruction(instr, expr->id()); 9685 } else if (proxy != NULL) { 9686 Variable* var = proxy->var(); 9687 if (var->IsUnallocated()) { 9688 Bailout(kDeleteWithGlobalVariable); 9689 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 9690 // Result of deleting non-global variables is false. 
'this' is not 9691 // really a variable, though we implement it as one. The 9692 // subexpression does not have side effects. 9693 HValue* value = var->is_this() 9694 ? graph()->GetConstantTrue() 9695 : graph()->GetConstantFalse(); 9696 return ast_context()->ReturnValue(value); 9697 } else { 9698 Bailout(kDeleteWithNonGlobalVariable); 9699 } 9700 } else { 9701 // Result of deleting non-property, non-variable reference is true. 9702 // Evaluate the subexpression for side effects. 9703 CHECK_ALIVE(VisitForEffect(expr->expression())); 9704 return ast_context()->ReturnValue(graph()->GetConstantTrue()); 9705 } 9706 } 9707 9708 9709 void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) { 9710 CHECK_ALIVE(VisitForEffect(expr->expression())); 9711 return ast_context()->ReturnValue(graph()->GetConstantUndefined()); 9712 } 9713 9714 9715 void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) { 9716 CHECK_ALIVE(VisitForTypeOf(expr->expression())); 9717 HValue* value = Pop(); 9718 HInstruction* instr = New<HTypeof>(value); 9719 return ast_context()->ReturnInstruction(instr, expr->id()); 9720 } 9721 9722 9723 void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) { 9724 if (ast_context()->IsTest()) { 9725 TestContext* context = TestContext::cast(ast_context()); 9726 VisitForControl(expr->expression(), 9727 context->if_false(), 9728 context->if_true()); 9729 return; 9730 } 9731 9732 if (ast_context()->IsEffect()) { 9733 VisitForEffect(expr->expression()); 9734 return; 9735 } 9736 9737 ASSERT(ast_context()->IsValue()); 9738 HBasicBlock* materialize_false = graph()->CreateBasicBlock(); 9739 HBasicBlock* materialize_true = graph()->CreateBasicBlock(); 9740 CHECK_BAILOUT(VisitForControl(expr->expression(), 9741 materialize_false, 9742 materialize_true)); 9743 9744 if (materialize_false->HasPredecessor()) { 9745 materialize_false->SetJoinId(expr->MaterializeFalseId()); 9746 set_current_block(materialize_false); 9747 Push(graph()->GetConstantFalse()); 9748 } else { 
9749 materialize_false = NULL; 9750 } 9751 9752 if (materialize_true->HasPredecessor()) { 9753 materialize_true->SetJoinId(expr->MaterializeTrueId()); 9754 set_current_block(materialize_true); 9755 Push(graph()->GetConstantTrue()); 9756 } else { 9757 materialize_true = NULL; 9758 } 9759 9760 HBasicBlock* join = 9761 CreateJoin(materialize_false, materialize_true, expr->id()); 9762 set_current_block(join); 9763 if (join != NULL) return ast_context()->ReturnValue(Pop()); 9764 } 9765 9766 9767 HInstruction* HOptimizedGraphBuilder::BuildIncrement( 9768 bool returns_original_input, 9769 CountOperation* expr) { 9770 // The input to the count operation is on top of the expression stack. 9771 Representation rep = Representation::FromType(expr->type()); 9772 if (rep.IsNone() || rep.IsTagged()) { 9773 rep = Representation::Smi(); 9774 } 9775 9776 if (returns_original_input) { 9777 // We need an explicit HValue representing ToNumber(input). The 9778 // actual HChange instruction we need is (sometimes) added in a later 9779 // phase, so it is not available now to be used as an input to HAdd and 9780 // as the return value. 9781 HInstruction* number_input = AddUncasted<HForceRepresentation>(Pop(), rep); 9782 if (!rep.IsDouble()) { 9783 number_input->SetFlag(HInstruction::kFlexibleRepresentation); 9784 number_input->SetFlag(HInstruction::kCannotBeTagged); 9785 } 9786 Push(number_input); 9787 } 9788 9789 // The addition has no side effects, so we do not need 9790 // to simulate the expression stack after this instruction. 9791 // Any later failures deopt to the load of the input or earlier. 9792 HConstant* delta = (expr->op() == Token::INC) 9793 ? 
graph()->GetConstant1() 9794 : graph()->GetConstantMinus1(); 9795 HInstruction* instr = AddUncasted<HAdd>(Top(), delta); 9796 if (instr->IsAdd()) { 9797 HAdd* add = HAdd::cast(instr); 9798 add->set_observed_input_representation(1, rep); 9799 add->set_observed_input_representation(2, Representation::Smi()); 9800 } 9801 instr->SetFlag(HInstruction::kCannotBeTagged); 9802 instr->ClearAllSideEffects(); 9803 return instr; 9804 } 9805 9806 9807 void HOptimizedGraphBuilder::BuildStoreForEffect(Expression* expr, 9808 Property* prop, 9809 BailoutId ast_id, 9810 BailoutId return_id, 9811 HValue* object, 9812 HValue* key, 9813 HValue* value) { 9814 EffectContext for_effect(this); 9815 Push(object); 9816 if (key != NULL) Push(key); 9817 Push(value); 9818 BuildStore(expr, prop, ast_id, return_id); 9819 } 9820 9821 9822 void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) { 9823 ASSERT(!HasStackOverflow()); 9824 ASSERT(current_block() != NULL); 9825 ASSERT(current_block()->HasPredecessor()); 9826 if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position()); 9827 Expression* target = expr->expression(); 9828 VariableProxy* proxy = target->AsVariableProxy(); 9829 Property* prop = target->AsProperty(); 9830 if (proxy == NULL && prop == NULL) { 9831 return Bailout(kInvalidLhsInCountOperation); 9832 } 9833 9834 // Match the full code generator stack by simulating an extra stack 9835 // element for postfix operations in a non-effect context. The return 9836 // value is ToNumber(input). 9837 bool returns_original_input = 9838 expr->is_postfix() && !ast_context()->IsEffect(); 9839 HValue* input = NULL; // ToNumber(original_input). 9840 HValue* after = NULL; // The result after incrementing or decrementing. 9841 9842 if (proxy != NULL) { 9843 Variable* var = proxy->var(); 9844 if (var->mode() == CONST_LEGACY) { 9845 return Bailout(kUnsupportedCountOperationWithConst); 9846 } 9847 // Argument of the count operation is a variable, not a property. 
9848 ASSERT(prop == NULL); 9849 CHECK_ALIVE(VisitForValue(target)); 9850 9851 after = BuildIncrement(returns_original_input, expr); 9852 input = returns_original_input ? Top() : Pop(); 9853 Push(after); 9854 9855 switch (var->location()) { 9856 case Variable::UNALLOCATED: 9857 HandleGlobalVariableAssignment(var, 9858 after, 9859 expr->AssignmentId()); 9860 break; 9861 9862 case Variable::PARAMETER: 9863 case Variable::LOCAL: 9864 BindIfLive(var, after); 9865 break; 9866 9867 case Variable::CONTEXT: { 9868 // Bail out if we try to mutate a parameter value in a function 9869 // using the arguments object. We do not (yet) correctly handle the 9870 // arguments property of the function. 9871 if (current_info()->scope()->arguments() != NULL) { 9872 // Parameters will rewrite to context slots. We have no direct 9873 // way to detect that the variable is a parameter so we use a 9874 // linear search of the parameter list. 9875 int count = current_info()->scope()->num_parameters(); 9876 for (int i = 0; i < count; ++i) { 9877 if (var == current_info()->scope()->parameter(i)) { 9878 return Bailout(kAssignmentToParameterInArgumentsObject); 9879 } 9880 } 9881 } 9882 9883 HValue* context = BuildContextChainWalk(var); 9884 HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode()) 9885 ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck; 9886 HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(), 9887 mode, after); 9888 if (instr->HasObservableSideEffects()) { 9889 Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE); 9890 } 9891 break; 9892 } 9893 9894 case Variable::LOOKUP: 9895 return Bailout(kLookupVariableInCountOperation); 9896 } 9897 9898 Drop(returns_original_input ? 2 : 1); 9899 return ast_context()->ReturnValue(expr->is_postfix() ? input : after); 9900 } 9901 9902 // Argument of the count operation is a property. 
9903 ASSERT(prop != NULL); 9904 if (returns_original_input) Push(graph()->GetConstantUndefined()); 9905 9906 CHECK_ALIVE(VisitForValue(prop->obj())); 9907 HValue* object = Top(); 9908 9909 HValue* key = NULL; 9910 if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) || 9911 prop->IsStringAccess()) { 9912 CHECK_ALIVE(VisitForValue(prop->key())); 9913 key = Top(); 9914 } 9915 9916 CHECK_ALIVE(PushLoad(prop, object, key)); 9917 9918 after = BuildIncrement(returns_original_input, expr); 9919 9920 if (returns_original_input) { 9921 input = Pop(); 9922 // Drop object and key to push it again in the effect context below. 9923 Drop(key == NULL ? 1 : 2); 9924 environment()->SetExpressionStackAt(0, input); 9925 CHECK_ALIVE(BuildStoreForEffect( 9926 expr, prop, expr->id(), expr->AssignmentId(), object, key, after)); 9927 return ast_context()->ReturnValue(Pop()); 9928 } 9929 9930 environment()->SetExpressionStackAt(0, after); 9931 return BuildStore(expr, prop, expr->id(), expr->AssignmentId()); 9932 } 9933 9934 9935 HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt( 9936 HValue* string, 9937 HValue* index) { 9938 if (string->IsConstant() && index->IsConstant()) { 9939 HConstant* c_string = HConstant::cast(string); 9940 HConstant* c_index = HConstant::cast(index); 9941 if (c_string->HasStringValue() && c_index->HasNumberValue()) { 9942 int32_t i = c_index->NumberValueAsInteger32(); 9943 Handle<String> s = c_string->StringValue(); 9944 if (i < 0 || i >= s->length()) { 9945 return New<HConstant>(OS::nan_value()); 9946 } 9947 return New<HConstant>(s->Get(i)); 9948 } 9949 } 9950 string = BuildCheckString(string); 9951 index = Add<HBoundsCheck>(index, AddLoadStringLength(string)); 9952 return New<HStringCharCodeAt>(string, index); 9953 } 9954 9955 9956 // Checks if the given shift amounts have following forms: 9957 // (N1) and (N2) with N1 + N2 = 32; (sa) and (32 - sa). 
9958 static bool ShiftAmountsAllowReplaceByRotate(HValue* sa, 9959 HValue* const32_minus_sa) { 9960 if (sa->IsConstant() && const32_minus_sa->IsConstant()) { 9961 const HConstant* c1 = HConstant::cast(sa); 9962 const HConstant* c2 = HConstant::cast(const32_minus_sa); 9963 return c1->HasInteger32Value() && c2->HasInteger32Value() && 9964 (c1->Integer32Value() + c2->Integer32Value() == 32); 9965 } 9966 if (!const32_minus_sa->IsSub()) return false; 9967 HSub* sub = HSub::cast(const32_minus_sa); 9968 return sub->left()->EqualsInteger32Constant(32) && sub->right() == sa; 9969 } 9970 9971 9972 // Checks if the left and the right are shift instructions with the oposite 9973 // directions that can be replaced by one rotate right instruction or not. 9974 // Returns the operand and the shift amount for the rotate instruction in the 9975 // former case. 9976 bool HGraphBuilder::MatchRotateRight(HValue* left, 9977 HValue* right, 9978 HValue** operand, 9979 HValue** shift_amount) { 9980 HShl* shl; 9981 HShr* shr; 9982 if (left->IsShl() && right->IsShr()) { 9983 shl = HShl::cast(left); 9984 shr = HShr::cast(right); 9985 } else if (left->IsShr() && right->IsShl()) { 9986 shl = HShl::cast(right); 9987 shr = HShr::cast(left); 9988 } else { 9989 return false; 9990 } 9991 if (shl->left() != shr->left()) return false; 9992 9993 if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) && 9994 !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) { 9995 return false; 9996 } 9997 *operand= shr->left(); 9998 *shift_amount = shr->right(); 9999 return true; 10000 } 10001 10002 10003 bool CanBeZero(HValue* right) { 10004 if (right->IsConstant()) { 10005 HConstant* right_const = HConstant::cast(right); 10006 if (right_const->HasInteger32Value() && 10007 (right_const->Integer32Value() & 0x1f) != 0) { 10008 return false; 10009 } 10010 } 10011 return true; 10012 } 10013 10014 10015 HValue* HGraphBuilder::EnforceNumberType(HValue* number, 10016 Type* expected) { 10017 if 
(expected->Is(Type::SignedSmall())) { 10018 return AddUncasted<HForceRepresentation>(number, Representation::Smi()); 10019 } 10020 if (expected->Is(Type::Signed32())) { 10021 return AddUncasted<HForceRepresentation>(number, 10022 Representation::Integer32()); 10023 } 10024 return number; 10025 } 10026 10027 10028 HValue* HGraphBuilder::TruncateToNumber(HValue* value, Type** expected) { 10029 if (value->IsConstant()) { 10030 HConstant* constant = HConstant::cast(value); 10031 Maybe<HConstant*> number = constant->CopyToTruncatedNumber(zone()); 10032 if (number.has_value) { 10033 *expected = Type::Number(zone()); 10034 return AddInstruction(number.value); 10035 } 10036 } 10037 10038 // We put temporary values on the stack, which don't correspond to anything 10039 // in baseline code. Since nothing is observable we avoid recording those 10040 // pushes with a NoObservableSideEffectsScope. 10041 NoObservableSideEffectsScope no_effects(this); 10042 10043 Type* expected_type = *expected; 10044 10045 // Separate the number type from the rest. 10046 Type* expected_obj = 10047 Type::Intersect(expected_type, Type::NonNumber(zone()), zone()); 10048 Type* expected_number = 10049 Type::Intersect(expected_type, Type::Number(zone()), zone()); 10050 10051 // We expect to get a number. 10052 // (We need to check first, since Type::None->Is(Type::Any()) == true. 10053 if (expected_obj->Is(Type::None())) { 10054 ASSERT(!expected_number->Is(Type::None(zone()))); 10055 return value; 10056 } 10057 10058 if (expected_obj->Is(Type::Undefined(zone()))) { 10059 // This is already done by HChange. 
10060 *expected = Type::Union(expected_number, Type::Number(zone()), zone()); 10061 return value; 10062 } 10063 10064 return value; 10065 } 10066 10067 10068 HValue* HOptimizedGraphBuilder::BuildBinaryOperation( 10069 BinaryOperation* expr, 10070 HValue* left, 10071 HValue* right, 10072 PushBeforeSimulateBehavior push_sim_result) { 10073 Type* left_type = expr->left()->bounds().lower; 10074 Type* right_type = expr->right()->bounds().lower; 10075 Type* result_type = expr->bounds().lower; 10076 Maybe<int> fixed_right_arg = expr->fixed_right_arg(); 10077 Handle<AllocationSite> allocation_site = expr->allocation_site(); 10078 10079 HAllocationMode allocation_mode; 10080 if (FLAG_allocation_site_pretenuring && !allocation_site.is_null()) { 10081 allocation_mode = HAllocationMode(allocation_site); 10082 } 10083 10084 HValue* result = HGraphBuilder::BuildBinaryOperation( 10085 expr->op(), left, right, left_type, right_type, result_type, 10086 fixed_right_arg, allocation_mode); 10087 // Add a simulate after instructions with observable side effects, and 10088 // after phis, which are the result of BuildBinaryOperation when we 10089 // inlined some complex subgraph. 
10090 if (result->HasObservableSideEffects() || result->IsPhi()) { 10091 if (push_sim_result == PUSH_BEFORE_SIMULATE) { 10092 Push(result); 10093 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE); 10094 Drop(1); 10095 } else { 10096 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE); 10097 } 10098 } 10099 return result; 10100 } 10101 10102 10103 HValue* HGraphBuilder::BuildBinaryOperation( 10104 Token::Value op, 10105 HValue* left, 10106 HValue* right, 10107 Type* left_type, 10108 Type* right_type, 10109 Type* result_type, 10110 Maybe<int> fixed_right_arg, 10111 HAllocationMode allocation_mode) { 10112 10113 Representation left_rep = Representation::FromType(left_type); 10114 Representation right_rep = Representation::FromType(right_type); 10115 10116 bool maybe_string_add = op == Token::ADD && 10117 (left_type->Maybe(Type::String()) || 10118 right_type->Maybe(Type::String())); 10119 10120 if (left_type->Is(Type::None())) { 10121 Add<HDeoptimize>("Insufficient type feedback for LHS of binary operation", 10122 Deoptimizer::SOFT); 10123 // TODO(rossberg): we should be able to get rid of non-continuous 10124 // defaults. 10125 left_type = Type::Any(zone()); 10126 } else { 10127 if (!maybe_string_add) left = TruncateToNumber(left, &left_type); 10128 left_rep = Representation::FromType(left_type); 10129 } 10130 10131 if (right_type->Is(Type::None())) { 10132 Add<HDeoptimize>("Insufficient type feedback for RHS of binary operation", 10133 Deoptimizer::SOFT); 10134 right_type = Type::Any(zone()); 10135 } else { 10136 if (!maybe_string_add) right = TruncateToNumber(right, &right_type); 10137 right_rep = Representation::FromType(right_type); 10138 } 10139 10140 // Special case for string addition here. 10141 if (op == Token::ADD && 10142 (left_type->Is(Type::String()) || right_type->Is(Type::String()))) { 10143 // Validate type feedback for left argument. 
10144 if (left_type->Is(Type::String())) { 10145 left = BuildCheckString(left); 10146 } 10147 10148 // Validate type feedback for right argument. 10149 if (right_type->Is(Type::String())) { 10150 right = BuildCheckString(right); 10151 } 10152 10153 // Convert left argument as necessary. 10154 if (left_type->Is(Type::Number())) { 10155 ASSERT(right_type->Is(Type::String())); 10156 left = BuildNumberToString(left, left_type); 10157 } else if (!left_type->Is(Type::String())) { 10158 ASSERT(right_type->Is(Type::String())); 10159 HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_RIGHT); 10160 Add<HPushArguments>(left, right); 10161 return AddUncasted<HInvokeFunction>(function, 2); 10162 } 10163 10164 // Convert right argument as necessary. 10165 if (right_type->Is(Type::Number())) { 10166 ASSERT(left_type->Is(Type::String())); 10167 right = BuildNumberToString(right, right_type); 10168 } else if (!right_type->Is(Type::String())) { 10169 ASSERT(left_type->Is(Type::String())); 10170 HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_LEFT); 10171 Add<HPushArguments>(left, right); 10172 return AddUncasted<HInvokeFunction>(function, 2); 10173 } 10174 10175 // Fast path for empty constant strings. 10176 if (left->IsConstant() && 10177 HConstant::cast(left)->HasStringValue() && 10178 HConstant::cast(left)->StringValue()->length() == 0) { 10179 return right; 10180 } 10181 if (right->IsConstant() && 10182 HConstant::cast(right)->HasStringValue() && 10183 HConstant::cast(right)->StringValue()->length() == 0) { 10184 return left; 10185 } 10186 10187 // Register the dependent code with the allocation site. 
10188 if (!allocation_mode.feedback_site().is_null()) { 10189 ASSERT(!graph()->info()->IsStub()); 10190 Handle<AllocationSite> site(allocation_mode.feedback_site()); 10191 AllocationSite::AddDependentCompilationInfo( 10192 site, AllocationSite::TENURING, top_info()); 10193 } 10194 10195 // Inline the string addition into the stub when creating allocation 10196 // mementos to gather allocation site feedback, or if we can statically 10197 // infer that we're going to create a cons string. 10198 if ((graph()->info()->IsStub() && 10199 allocation_mode.CreateAllocationMementos()) || 10200 (left->IsConstant() && 10201 HConstant::cast(left)->HasStringValue() && 10202 HConstant::cast(left)->StringValue()->length() + 1 >= 10203 ConsString::kMinLength) || 10204 (right->IsConstant() && 10205 HConstant::cast(right)->HasStringValue() && 10206 HConstant::cast(right)->StringValue()->length() + 1 >= 10207 ConsString::kMinLength)) { 10208 return BuildStringAdd(left, right, allocation_mode); 10209 } 10210 10211 // Fallback to using the string add stub. 10212 return AddUncasted<HStringAdd>( 10213 left, right, allocation_mode.GetPretenureMode(), 10214 STRING_ADD_CHECK_NONE, allocation_mode.feedback_site()); 10215 } 10216 10217 if (graph()->info()->IsStub()) { 10218 left = EnforceNumberType(left, left_type); 10219 right = EnforceNumberType(right, right_type); 10220 } 10221 10222 Representation result_rep = Representation::FromType(result_type); 10223 10224 bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) || 10225 (right_rep.IsTagged() && !right_rep.IsSmi()); 10226 10227 HInstruction* instr = NULL; 10228 // Only the stub is allowed to call into the runtime, since otherwise we would 10229 // inline several instructions (including the two pushes) for every tagged 10230 // operation in optimized code, which is more expensive, than a stub call. 
10231 if (graph()->info()->IsStub() && is_non_primitive) { 10232 HValue* function = AddLoadJSBuiltin(BinaryOpIC::TokenToJSBuiltin(op)); 10233 Add<HPushArguments>(left, right); 10234 instr = AddUncasted<HInvokeFunction>(function, 2); 10235 } else { 10236 switch (op) { 10237 case Token::ADD: 10238 instr = AddUncasted<HAdd>(left, right); 10239 break; 10240 case Token::SUB: 10241 instr = AddUncasted<HSub>(left, right); 10242 break; 10243 case Token::MUL: 10244 instr = AddUncasted<HMul>(left, right); 10245 break; 10246 case Token::MOD: { 10247 if (fixed_right_arg.has_value && 10248 !right->EqualsInteger32Constant(fixed_right_arg.value)) { 10249 HConstant* fixed_right = Add<HConstant>( 10250 static_cast<int>(fixed_right_arg.value)); 10251 IfBuilder if_same(this); 10252 if_same.If<HCompareNumericAndBranch>(right, fixed_right, Token::EQ); 10253 if_same.Then(); 10254 if_same.ElseDeopt("Unexpected RHS of binary operation"); 10255 right = fixed_right; 10256 } 10257 instr = AddUncasted<HMod>(left, right); 10258 break; 10259 } 10260 case Token::DIV: 10261 instr = AddUncasted<HDiv>(left, right); 10262 break; 10263 case Token::BIT_XOR: 10264 case Token::BIT_AND: 10265 instr = AddUncasted<HBitwise>(op, left, right); 10266 break; 10267 case Token::BIT_OR: { 10268 HValue* operand, *shift_amount; 10269 if (left_type->Is(Type::Signed32()) && 10270 right_type->Is(Type::Signed32()) && 10271 MatchRotateRight(left, right, &operand, &shift_amount)) { 10272 instr = AddUncasted<HRor>(operand, shift_amount); 10273 } else { 10274 instr = AddUncasted<HBitwise>(op, left, right); 10275 } 10276 break; 10277 } 10278 case Token::SAR: 10279 instr = AddUncasted<HSar>(left, right); 10280 break; 10281 case Token::SHR: 10282 instr = AddUncasted<HShr>(left, right); 10283 if (FLAG_opt_safe_uint32_operations && instr->IsShr() && 10284 CanBeZero(right)) { 10285 graph()->RecordUint32Instruction(instr); 10286 } 10287 break; 10288 case Token::SHL: 10289 instr = AddUncasted<HShl>(left, right); 10290 break; 10291 
      default:
        // Every binary token kind must have been handled above.
        UNREACHABLE();
    }
  }

  // Propagate the collected type feedback into the freshly built
  // instruction so representation inference can use it later.
  if (instr->IsBinaryOperation()) {
    HBinaryOperation* binop = HBinaryOperation::cast(instr);
    binop->set_observed_input_representation(1, left_rep);
    binop->set_observed_input_representation(2, right_rep);
    binop->initialize_output_representation(result_rep);
    if (graph()->info()->IsStub()) {
      // Stub should not call into stub.
      instr->SetFlag(HValue::kCannotBeTagged);
      // And should truncate on HForceRepresentation already.
      if (left->IsForceRepresentation()) {
        left->CopyFlag(HValue::kTruncatingToSmi, instr);
        left->CopyFlag(HValue::kTruncatingToInt32, instr);
      }
      if (right->IsForceRepresentation()) {
        right->CopyFlag(HValue::kTruncatingToSmi, instr);
        right->CopyFlag(HValue::kTruncatingToInt32, instr);
      }
    }
  }
  return instr;
}


// Check for the form (%_ClassOf(foo) === 'BarClass').
// Returns true only for a strict equality whose left side is a call to the
// %_ClassOf runtime function and whose right side is a string literal.
static bool IsClassOfTest(CompareOperation* expr) {
  if (expr->op() != Token::EQ_STRICT) return false;
  CallRuntime* call = expr->left()->AsCallRuntime();
  if (call == NULL) return false;
  Literal* literal = expr->right()->AsLiteral();
  if (literal == NULL) return false;
  if (!literal->value()->IsString()) return false;
  if (!call->name()->IsOneByteEqualTo(STATIC_ASCII_VECTOR("_ClassOf"))) {
    return false;
  }
  ASSERT(call->arguments()->length() == 1);
  return true;
}


// Dispatch on the binary operator: the comma operator and the
// short-circuiting logical operators need special control-flow handling;
// every other operator is handled as an arithmetic/bitwise expression.
void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  switch (expr->op()) {
    case Token::COMMA:
      return VisitComma(expr);
    case Token::OR:
    case Token::AND:
      return VisitLogicalExpression(expr);
    default:
      return VisitArithmeticExpression(expr);
  }
}


void
HOptimizedGraphBuilder::VisitComma(BinaryOperation* expr) {
  // Comma expression: the left operand is evaluated only for its side
  // effects; the value of the whole expression is the right operand.
  CHECK_ALIVE(VisitForEffect(expr->left()));
  // Visit the right subexpression in the same AST context as the entire
  // expression.
  Visit(expr->right());
}


// Translates && and || with short-circuit semantics. The generated graph
// shape depends on the surrounding AST context: test (branch targets),
// value (the expression's value is needed), or effect (value discarded).
void HOptimizedGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  if (ast_context()->IsTest()) {
    TestContext* context = TestContext::cast(ast_context());
    // Translate left subexpression.
    HBasicBlock* eval_right = graph()->CreateBasicBlock();
    if (is_logical_and) {
      // a && b: if a is false, jump straight to the false target.
      CHECK_BAILOUT(VisitForControl(expr->left(),
                                    eval_right,
                                    context->if_false()));
    } else {
      // a || b: if a is true, jump straight to the true target.
      CHECK_BAILOUT(VisitForControl(expr->left(),
                                    context->if_true(),
                                    eval_right));
    }

    // Translate right subexpression by visiting it in the same AST
    // context as the entire expression.
    if (eval_right->HasPredecessor()) {
      eval_right->SetJoinId(expr->RightId());
      set_current_block(eval_right);
      Visit(expr->right());
    }

  } else if (ast_context()->IsValue()) {
    CHECK_ALIVE(VisitForValue(expr->left()));
    ASSERT(current_block() != NULL);
    HValue* left_value = Top();

    // Short-circuit left values that always evaluate to the same boolean value.
    if (expr->left()->ToBooleanIsTrue() || expr->left()->ToBooleanIsFalse()) {
      // l (evals true) && r -> r
      // l (evals true) || r -> l
      // l (evals false) && r -> l
      // l (evals false) || r -> r
      if (is_logical_and == expr->left()->ToBooleanIsTrue()) {
        // The result is the right operand: discard the left value and
        // evaluate the right one in its place.
        Drop(1);
        CHECK_ALIVE(VisitForValue(expr->right()));
      }
      return ast_context()->ReturnValue(Pop());
    }

    // We need an extra block to maintain edge-split form.
    HBasicBlock* empty_block = graph()->CreateBasicBlock();
    HBasicBlock* eval_right = graph()->CreateBasicBlock();
    ToBooleanStub::Types expected(expr->left()->to_boolean_types());
    HBranch* test = is_logical_and
        ? New<HBranch>(left_value, expected, eval_right, empty_block)
        : New<HBranch>(left_value, expected, empty_block, eval_right);
    FinishCurrentBlock(test);

    set_current_block(eval_right);
    Drop(1);  // Value of the left subexpression.
    CHECK_BAILOUT(VisitForValue(expr->right()));

    // The empty block carries the left value; the right block carries the
    // right value. The phi at the join is the expression's value.
    HBasicBlock* join_block =
      CreateJoin(empty_block, current_block(), expr->id());
    set_current_block(join_block);
    return ast_context()->ReturnValue(Pop());

  } else {
    ASSERT(ast_context()->IsEffect());
    // In an effect context, we don't need the value of the left subexpression,
    // only its control flow and side effects.  We need an extra block to
    // maintain edge-split form.
    HBasicBlock* empty_block = graph()->CreateBasicBlock();
    HBasicBlock* right_block = graph()->CreateBasicBlock();
    if (is_logical_and) {
      CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
    } else {
      CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
    }

    // TODO(kmillikin): Find a way to fix this.  It's ugly that there are
    // actually two empty blocks (one here and one inserted by
    // TestContext::BuildBranch, and that they both have an HSimulate though the
    // second one is not a merge node, and that we really have no good AST ID to
    // put on that first HSimulate.

    // Either target may be unreachable (e.g. when the left operand's
    // boolean value is statically known); drop unreachable blocks so the
    // join below only merges live predecessors.
    if (empty_block->HasPredecessor()) {
      empty_block->SetJoinId(expr->id());
    } else {
      empty_block = NULL;
    }

    if (right_block->HasPredecessor()) {
      right_block->SetJoinId(expr->RightId());
      set_current_block(right_block);
      CHECK_BAILOUT(VisitForEffect(expr->right()));
      right_block = current_block();
    } else {
      right_block = NULL;
    }

    HBasicBlock* join_block =
        CreateJoin(empty_block, right_block, expr->id());
    set_current_block(join_block);
    // We did not materialize any value in the predecessor environments,
    // so there is no need to handle it here.
  }
}


// Evaluates both operands to values and builds the arithmetic/bitwise
// instruction via BuildBinaryOperation. The result is pushed before the
// simulate only when the surrounding context actually consumes the value.
void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
  CHECK_ALIVE(VisitForValue(expr->left()));
  CHECK_ALIVE(VisitForValue(expr->right()));
  SetSourcePosition(expr->position());
  HValue* right = Pop();
  HValue* left = Pop();
  HValue* result =
      BuildBinaryOperation(expr, left, right,
          ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
                                    : PUSH_BEFORE_SIMULATE);
  if (FLAG_hydrogen_track_positions && result->IsBinaryOperation()) {
    HBinaryOperation::cast(result)->SetOperandPositions(
        zone(),
        ScriptPositionToSourcePosition(expr->left()->position()),
        ScriptPositionToSourcePosition(expr->right()->position()));
  }
  return ast_context()->ReturnValue(result);
}


// Builds the control instruction for a comparison of the form
// typeof sub_expr == 'check' (sub_expr and check are extracted by the
// caller via CompareOperation::IsLiteralCompareTypeof).
void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
                                                        Expression* sub_expr,
                                                        Handle<String> check) {
  CHECK_ALIVE(VisitForTypeOf(sub_expr));
  SetSourcePosition(expr->position());
  HValue* value = Pop();
  HTypeofIsAndBranch* instr = New<HTypeofIsAndBranch>(value, check);
  return ast_context()->ReturnControl(instr, expr->id());
}


// True for a strict equality where at least one operand is a boolean
// constant; such comparisons can be lowered to an object-identity branch.
static bool IsLiteralCompareBool(Isolate* isolate,
                                 HValue* left,
                                 Token::Value op,
                                 HValue* right) {
  return op == Token::EQ_STRICT &&
      ((left->IsConstant() &&
        HConstant::cast(left)->handle(isolate)->IsBoolean()) ||
       (right->IsConstant() &&
        HConstant::cast(right)->handle(isolate)->IsBoolean()));
}


void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());

  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());

  // Check for a few fast cases. The AST visiting behavior must be in sync
  // with the full codegen: We don't push both left and right values onto
  // the expression stack when one side is a special-case literal.
  Expression* sub_expr = NULL;
  Handle<String> check;
  // Special-case literal comparisons: typeof-checks and comparisons
  // against undefined/null avoid evaluating the literal side.
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    return HandleLiteralCompareTypeof(expr, sub_expr, check);
  }
  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
    return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
  }
  if (expr->IsLiteralCompareNull(&sub_expr)) {
    return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
  }

  // %_ClassOf(x) === 'Foo' lowers to a dedicated class-of test branch.
  if (IsClassOfTest(expr)) {
    CallRuntime* call = expr->left()->AsCallRuntime();
    ASSERT(call->arguments()->length() == 1);
    CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
    HValue* value = Pop();
    Literal* literal = expr->right()->AsLiteral();
    Handle<String> rhs = Handle<String>::cast(literal->value());
    HClassOfTestAndBranch* instr = New<HClassOfTestAndBranch>(value, rhs);
    return ast_context()->ReturnControl(instr, expr->id());
  }

  Type* left_type = expr->left()->bounds().lower;
  Type* right_type = expr->right()->bounds().lower;
  Type* combined_type = expr->combined_type();

  CHECK_ALIVE(VisitForValue(expr->left()));
  CHECK_ALIVE(VisitForValue(expr->right()));

  if (FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());

  HValue* right = Pop();
  HValue* left = Pop();
  Token::Value op = expr->op();

  // Strict equality against a boolean constant is object identity.
  if (IsLiteralCompareBool(isolate(), left, op, right)) {
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
    return ast_context()->ReturnControl(result, expr->id());
  }

  if (op == Token::INSTANCEOF) {
    // Check to see if the rhs of the instanceof is a global function not
    // residing in new space. If it is we assume that the function will stay the
    // same.
    Handle<JSFunction> target = Handle<JSFunction>::null();
    VariableProxy* proxy = expr->right()->AsVariableProxy();
    bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated();
    if (global_function &&
        current_info()->has_global_object() &&
        !current_info()->global_object()->IsAccessCheckNeeded()) {
      Handle<String> name = proxy->name();
      Handle<GlobalObject> global(current_info()->global_object());
      LookupResult lookup(isolate());
      global->Lookup(name, &lookup);
      if (lookup.IsNormal() && lookup.GetValue()->IsJSFunction()) {
        Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
        // If the function is in new space we assume it's more likely to
        // change and thus prefer the general IC code.
        if (!isolate()->heap()->InNewSpace(*candidate)) {
          target = candidate;
        }
      }
    }

    // If the target is not null we have found a known global function that is
    // assumed to stay the same for this instanceof.
    if (target.is_null()) {
      HInstanceOf* result = New<HInstanceOf>(left, right);
      return ast_context()->ReturnInstruction(result, expr->id());
    } else {
      // Guard on the known function, then use the specialized instruction.
      Add<HCheckValue>(right, target);
      HInstanceOfKnownGlobal* result =
          New<HInstanceOfKnownGlobal>(left, target);
      return ast_context()->ReturnInstruction(result, expr->id());
    }

    // Code below assumes that we don't fall through.
    UNREACHABLE();
  } else if (op == Token::IN) {
    // 'in' is implemented by calling the IN builtin.
    HValue* function = AddLoadJSBuiltin(Builtins::IN);
    Add<HPushArguments>(left, right);
    // TODO(olivf) InvokeFunction produces a check for the parameter count,
    // even though we are certain to pass the correct number of arguments here.
    HInstruction* result = New<HInvokeFunction>(function, 2);
    return ast_context()->ReturnInstruction(result, expr->id());
  }

  PushBeforeSimulateBehavior push_behavior =
      ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
                                : PUSH_BEFORE_SIMULATE;
  HControlInstruction* compare = BuildCompareInstruction(
      op, left, right, left_type, right_type, combined_type,
      ScriptPositionToSourcePosition(expr->left()->position()),
      ScriptPositionToSourcePosition(expr->right()->position()),
      push_behavior, expr->id());
  if (compare == NULL) return;  // Bailed out.
  return ast_context()->ReturnControl(compare, expr->id());
}


// Builds the branch instruction for a comparison, specialized on the
// combined type feedback (receiver, internalized string, string, number,
// or generic). Returns NULL if graph building bailed out.
HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction(
    Token::Value op,
    HValue* left,
    HValue* right,
    Type* left_type,
    Type* right_type,
    Type* combined_type,
    HSourcePosition left_position,
    HSourcePosition right_position,
    PushBeforeSimulateBehavior push_sim_result,
    BailoutId bailout_id) {
  // Cases handled below depend on collected type feedback. They should
  // soft deoptimize when there is no type feedback.
  if (combined_type->Is(Type::None())) {
    Add<HDeoptimize>("Insufficient type feedback for combined type "
                     "of binary operation",
                     Deoptimizer::SOFT);
    combined_type = left_type = right_type = Type::Any(zone());
  }

  Representation left_rep = Representation::FromType(left_type);
  Representation right_rep = Representation::FromType(right_type);
  Representation combined_rep = Representation::FromType(combined_type);

  if (combined_type->Is(Type::Receiver())) {
    if (Token::IsEqualityOp(op)) {
      // HCompareObjectEqAndBranch can only deal with object, so
      // exclude numbers.
      if ((left->IsConstant() &&
           HConstant::cast(left)->HasNumberValue()) ||
          (right->IsConstant() &&
           HConstant::cast(right)->HasNumberValue())) {
        Add<HDeoptimize>("Type mismatch between feedback and constant",
                         Deoptimizer::SOFT);
        // The caller expects a branch instruction, so make it happy.
        return New<HBranch>(graph()->GetConstantTrue());
      }
      // Can we get away with map check and not instance type check?
      // Check the operand defined earlier in the graph (smaller block id),
      // so the check dominates both uses.
      HValue* operand_to_check =
          left->block()->block_id() < right->block()->block_id() ? left : right;
      if (combined_type->IsClass()) {
        Handle<Map> map = combined_type->AsClass()->Map();
        AddCheckMap(operand_to_check, map);
        HCompareObjectEqAndBranch* result =
            New<HCompareObjectEqAndBranch>(left, right);
        if (FLAG_hydrogen_track_positions) {
          result->set_operand_position(zone(), 0, left_position);
          result->set_operand_position(zone(), 1, right_position);
        }
        return result;
      } else {
        BuildCheckHeapObject(operand_to_check);
        Add<HCheckInstanceType>(operand_to_check,
                                HCheckInstanceType::IS_SPEC_OBJECT);
        HCompareObjectEqAndBranch* result =
            New<HCompareObjectEqAndBranch>(left, right);
        return result;
      }
    } else {
      // Relational comparison of objects is not supported here.
      Bailout(kUnsupportedNonPrimitiveCompare);
      return NULL;
    }
  } else if (combined_type->Is(Type::InternalizedString()) &&
             Token::IsEqualityOp(op)) {
    // If we have a constant argument, it should be consistent with the type
    // feedback (otherwise we fail assertions in HCompareObjectEqAndBranch).
    if ((left->IsConstant() &&
         !HConstant::cast(left)->HasInternalizedStringValue()) ||
        (right->IsConstant() &&
         !HConstant::cast(right)->HasInternalizedStringValue())) {
      Add<HDeoptimize>("Type mismatch between feedback and constant",
                       Deoptimizer::SOFT);
      // The caller expects a branch instruction, so make it happy.
      return New<HBranch>(graph()->GetConstantTrue());
    }
    // Internalized strings are unique, so equality is object identity.
    BuildCheckHeapObject(left);
    Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING);
    BuildCheckHeapObject(right);
    Add<HCheckInstanceType>(right, HCheckInstanceType::IS_INTERNALIZED_STRING);
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
    return result;
  } else if (combined_type->Is(Type::String())) {
    BuildCheckHeapObject(left);
    Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
    BuildCheckHeapObject(right);
    Add<HCheckInstanceType>(right, HCheckInstanceType::IS_STRING);
    HStringCompareAndBranch* result =
        New<HStringCompareAndBranch>(left, right, op);
    return result;
  } else {
    if (combined_rep.IsTagged() || combined_rep.IsNone()) {
      // Generic compare: it has observable side effects, so a simulate is
      // needed; whether the result is kept on the stack across it depends
      // on the caller's push behavior.
      HCompareGeneric* result = Add<HCompareGeneric>(left, right, op);
      result->set_observed_input_representation(1, left_rep);
      result->set_observed_input_representation(2, right_rep);
      if (result->HasObservableSideEffects()) {
        if (push_sim_result == PUSH_BEFORE_SIMULATE) {
          Push(result);
          AddSimulate(bailout_id, REMOVABLE_SIMULATE);
          Drop(1);
        } else {
          AddSimulate(bailout_id, REMOVABLE_SIMULATE);
        }
      }
      // TODO(jkummerow): Can we make this more efficient?
      HBranch* branch = New<HBranch>(result);
      return branch;
    } else {
      HCompareNumericAndBranch* result =
          New<HCompareNumericAndBranch>(left, right, op);
      result->set_observed_input_representation(left_rep, right_rep);
      if (FLAG_hydrogen_track_positions) {
        result->SetOperandPositions(zone(), left_position, right_position);
      }
      return result;
    }
  }
}


// Builds a comparison against null or undefined. Strict equality compares
// object identity with the nil constant; non-strict EQ falls back to
// BuildCompareNil with the recorded feedback type.
void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
                                                     Expression* sub_expr,
                                                     NilValue nil) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  ASSERT(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
  if (!FLAG_hydrogen_track_positions) SetSourcePosition(expr->position());
  CHECK_ALIVE(VisitForValue(sub_expr));
  HValue* value = Pop();
  if (expr->op() == Token::EQ_STRICT) {
    HConstant* nil_constant = nil == kNullValue
        ? graph()->GetConstantNull()
        : graph()->GetConstantUndefined();
    HCompareObjectEqAndBranch* instr =
        New<HCompareObjectEqAndBranch>(value, nil_constant);
    return ast_context()->ReturnControl(instr, expr->id());
  } else {
    ASSERT_EQ(Token::EQ, expr->op());
    Type* type = expr->combined_type()->Is(Type::None())
        ? Type::Any(zone()) : expr->combined_type();
    HIfContinuation continuation;
    BuildCompareNil(value, type, &continuation);
    return ast_context()->ReturnContinuation(&continuation, expr->id());
  }
}


HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
  // If we share optimized code between different closures, the
  // this-function is not a constant, except inside an inlined body.
  if (function_state()->outer() != NULL) {
    // Inlined body: the closure is known at compile time.
    return New<HConstant>(
        function_state()->compilation_info()->closure());
  } else {
    return New<HThisFunction>();
  }
}


// Emits an inline deep copy of a boilerplate object (object or array
// literal): allocates the object, copies the header, the elements backing
// store (unless COW) and the in-object properties, recursing into nested
// JSObjects via the allocation-site context.
HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
    Handle<JSObject> boilerplate_object,
    AllocationSiteUsageContext* site_context) {
  NoObservableSideEffectsScope no_effects(this);
  InstanceType instance_type = boilerplate_object->map()->instance_type();
  ASSERT(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);

  HType type = instance_type == JS_ARRAY_TYPE
      ? HType::JSArray() : HType::JSObject();
  HValue* object_size_constant = Add<HConstant>(
      boilerplate_object->map()->instance_size());

  // Pretenuring decisions come from the allocation site; the compiled code
  // is registered as dependent so it deopts if the tenuring decision flips.
  PretenureFlag pretenure_flag = NOT_TENURED;
  if (FLAG_allocation_site_pretenuring) {
    pretenure_flag = site_context->current()->GetPretenureMode();
    Handle<AllocationSite> site(site_context->current());
    AllocationSite::AddDependentCompilationInfo(
        site, AllocationSite::TENURING, top_info());
  }

  HInstruction* object = Add<HAllocate>(object_size_constant, type,
      pretenure_flag, instance_type, site_context->current());

  // If allocation folding reaches Page::kMaxRegularHeapObjectSize the
  // elements array may not get folded into the object. Hence, we set the
  // elements pointer to empty fixed array and let store elimination remove
  // this store in the folding case.
  HConstant* empty_fixed_array = Add<HConstant>(
      isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
      empty_fixed_array);

  BuildEmitObjectHeader(boilerplate_object, object);

  // COW element arrays are shared with the boilerplate, so no new backing
  // store is allocated for them (elements_size stays 0).
  Handle<FixedArrayBase> elements(boilerplate_object->elements());
  int elements_size = (elements->length() > 0 &&
      elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
          elements->Size() : 0;

  if (pretenure_flag == TENURED &&
      elements->map() == isolate()->heap()->fixed_cow_array_map() &&
      isolate()->heap()->InNewSpace(*elements)) {
    // If we would like to pretenure a fixed cow array, we must ensure that the
    // array is already in old space, otherwise we'll create too many old-to-
    // new-space pointers (overflowing the store buffer).
    elements = Handle<FixedArrayBase>(
        isolate()->factory()->CopyAndTenureFixedCOWArray(
            Handle<FixedArray>::cast(elements)));
    boilerplate_object->set_elements(*elements);
  }

  HInstruction* object_elements = NULL;
  if (elements_size > 0) {
    HValue* object_elements_size = Add<HConstant>(elements_size);
    InstanceType instance_type = boilerplate_object->HasFastDoubleElements()
        ? FIXED_DOUBLE_ARRAY_TYPE : FIXED_ARRAY_TYPE;
    object_elements = Add<HAllocate>(
        object_elements_size, HType::HeapObject(),
        pretenure_flag, instance_type, site_context->current());
  }
  BuildInitElementsInObjectHeader(boilerplate_object, object, object_elements);

  // Copy object elements if non-COW.
  if (object_elements != NULL) {
    BuildEmitElements(boilerplate_object, elements, object_elements,
                      site_context);
  }

  // Copy in-object properties.
  if (boilerplate_object->map()->NumberOfFields() != 0) {
    BuildEmitInObjectProperties(boilerplate_object, object, site_context,
                                pretenure_flag);
  }
  return object;
}


// Stores the map, the (empty) properties array and, for arrays, the length
// field of the freshly allocated literal object.
void HOptimizedGraphBuilder::BuildEmitObjectHeader(
    Handle<JSObject> boilerplate_object,
    HInstruction* object) {
  ASSERT(boilerplate_object->properties()->length() == 0);

  Handle<Map> boilerplate_object_map(boilerplate_object->map());
  AddStoreMapConstant(object, boilerplate_object_map);

  Handle<Object> properties_field =
      Handle<Object>(boilerplate_object->properties(), isolate());
  ASSERT(*properties_field == isolate()->heap()->empty_fixed_array());
  HInstruction* properties = Add<HConstant>(properties_field);
  HObjectAccess access = HObjectAccess::ForPropertiesPointer();
  Add<HStoreNamedField>(object, access, properties);

  if (boilerplate_object->IsJSArray()) {
    Handle<JSArray> boilerplate_array =
        Handle<JSArray>::cast(boilerplate_object);
    Handle<Object> length_field =
        Handle<Object>(boilerplate_array->length(), isolate());
    HInstruction* length = Add<HConstant>(length_field);

    ASSERT(boilerplate_array->length()->IsSmi());
    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(
        boilerplate_array->GetElementsKind()), length);
  }
}


// Stores the elements pointer of the new object: either the freshly
// allocated backing store, or (for COW/empty elements) the boilerplate's
// own elements array as a constant.
void HOptimizedGraphBuilder::BuildInitElementsInObjectHeader(
    Handle<JSObject> boilerplate_object,
    HInstruction* object,
    HInstruction* object_elements) {
  ASSERT(boilerplate_object->properties()->length() == 0);
  if (object_elements == NULL) {
    Handle<Object> elements_field =
        Handle<Object>(boilerplate_object->elements(), isolate());
    object_elements = Add<HConstant>(elements_field);
  }
  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
      object_elements);
}


void
HOptimizedGraphBuilder::BuildEmitInObjectProperties(
    Handle<JSObject> boilerplate_object,
    HInstruction* object,
    AllocationSiteUsageContext* site_context,
    PretenureFlag pretenure_flag) {
  // Copies every in-object field of the boilerplate into the new object,
  // recursing into nested JSObject values, then fills any remaining
  // in-object slots with the one-pointer filler map.
  Handle<Map> boilerplate_map(boilerplate_object->map());
  Handle<DescriptorArray> descriptors(boilerplate_map->instance_descriptors());
  int limit = boilerplate_map->NumberOfOwnDescriptors();

  int copied_fields = 0;
  for (int i = 0; i < limit; i++) {
    PropertyDetails details = descriptors->GetDetails(i);
    // Only FIELD properties live in the object itself; skip the rest.
    if (details.type() != FIELD) continue;
    copied_fields++;
    int index = descriptors->GetFieldIndex(i);
    int property_offset = boilerplate_object->GetInObjectPropertyOffset(index);
    // NOTE(review): 'name' is not used below; it appears to be kept for
    // debugging only.
    Handle<Name> name(descriptors->GetKey(i));
    Handle<Object> value =
        Handle<Object>(boilerplate_object->InObjectPropertyAt(index),
        isolate());

    // The access for the store depends on the type of the boilerplate.
    HObjectAccess access = boilerplate_object->IsJSArray() ?
        HObjectAccess::ForJSArrayOffset(property_offset) :
        HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);

    if (value->IsJSObject()) {
      // Nested object literal: deep-copy it within a nested allocation
      // site scope.
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      Handle<AllocationSite> current_site = site_context->EnterNewScope();
      HInstruction* result =
          BuildFastLiteral(value_object, site_context);
      site_context->ExitScope(current_site, value_object);
      Add<HStoreNamedField>(object, access, result);
    } else {
      Representation representation = details.representation();
      HInstruction* value_instruction;

      if (representation.IsDouble()) {
        // Allocate a HeapNumber box and store the value into it.
        HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
        // This heap number alloc does not have a corresponding
        // AllocationSite. That is okay because
        // 1) it's a child object of another object with a valid allocation site
        // 2) we can just use the mode of the parent object for pretenuring
        HInstruction* double_box =
            Add<HAllocate>(heap_number_constant, HType::HeapObject(),
                pretenure_flag, HEAP_NUMBER_TYPE);
        AddStoreMapConstant(double_box,
            isolate()->factory()->heap_number_map());
        Add<HStoreNamedField>(double_box, HObjectAccess::ForHeapNumberValue(),
            Add<HConstant>(value));
        value_instruction = double_box;
      } else if (representation.IsSmi()) {
        value_instruction = value->IsUninitialized()
            ? graph()->GetConstant0()
            : Add<HConstant>(value);
        // Ensure that value is stored as smi.
        access = access.WithRepresentation(representation);
      } else {
        value_instruction = Add<HConstant>(value);
      }

      Add<HStoreNamedField>(object, access, value_instruction);
    }
  }

  // Slots past the last copied field are filled with the one-pointer
  // filler map so the object is fully initialized for the GC.
  int inobject_properties = boilerplate_object->map()->inobject_properties();
  HInstruction* value_instruction =
      Add<HConstant>(isolate()->factory()->one_pointer_filler_map());
  for (int i = copied_fields; i < inobject_properties; i++) {
    ASSERT(boilerplate_object->IsJSObject());
    int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
    HObjectAccess access =
        HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
    Add<HStoreNamedField>(object, access, value_instruction);
  }
}


// Initializes the elements header of the copied backing store and then
// copies its contents, dispatching on the concrete backing-store type.
void HOptimizedGraphBuilder::BuildEmitElements(
    Handle<JSObject> boilerplate_object,
    Handle<FixedArrayBase> elements,
    HValue* object_elements,
    AllocationSiteUsageContext* site_context) {
  ElementsKind kind = boilerplate_object->map()->elements_kind();
  int elements_length = elements->length();
  HValue* object_elements_length = Add<HConstant>(elements_length);
  BuildInitializeElementsHeader(object_elements, kind, object_elements_length);

  // Copy elements backing store content.
  if (elements->IsFixedDoubleArray()) {
    BuildEmitFixedDoubleArray(elements, kind, object_elements);
  } else if (elements->IsFixedArray()) {
    BuildEmitFixedArray(elements, kind, object_elements,
                        site_context);
  } else {
    // Only the two fixed-array backing stores are expected here.
    UNREACHABLE();
  }
}


// Copies a FixedDoubleArray backing store element by element, loading from
// the boilerplate array and storing into the new backing store.
void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
    Handle<FixedArrayBase> elements,
    ElementsKind kind,
    HValue* object_elements) {
  HInstruction* boilerplate_elements = Add<HConstant>(elements);
  int elements_length = elements->length();
  for (int i = 0; i < elements_length; i++) {
    HValue* key_constant = Add<HConstant>(i);
    // ALLOW_RETURN_HOLE: holes in the boilerplate must be copied verbatim.
    HInstruction* value_instruction =
        Add<HLoadKeyed>(boilerplate_elements, key_constant,
                        static_cast<HValue*>(NULL), kind,
                        ALLOW_RETURN_HOLE);
    HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
                                           value_instruction, kind);
    store->SetFlag(HValue::kAllowUndefinedAsNaN);
  }
}


// Copies a FixedArray backing store. JSObject elements are deep-copied
// via BuildFastLiteral; all other values are copied with a keyed
// load/store pair.
void HOptimizedGraphBuilder::BuildEmitFixedArray(
    Handle<FixedArrayBase> elements,
    ElementsKind kind,
    HValue* object_elements,
    AllocationSiteUsageContext* site_context) {
  HInstruction* boilerplate_elements = Add<HConstant>(elements);
  int elements_length = elements->length();
  Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
  for (int i = 0; i < elements_length; i++) {
    Handle<Object> value(fast_elements->get(i), isolate());
    HValue* key_constant = Add<HConstant>(i);
    if (value->IsJSObject()) {
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      Handle<AllocationSite> current_site = site_context->EnterNewScope();
      HInstruction* result =
          BuildFastLiteral(value_object, site_context);
      site_context->ExitScope(current_site, value_object);
      Add<HStoreKeyed>(object_elements, key_constant, result, kind);
    } else {
      // ALLOW_RETURN_HOLE: holes must survive the copy unchanged.
      HInstruction* value_instruction =
          Add<HLoadKeyed>(boilerplate_elements, key_constant,
                          static_cast<HValue*>(NULL), kind,
                          ALLOW_RETURN_HOLE);
      Add<HStoreKeyed>(object_elements, key_constant, value_instruction, kind);
    }
  }
}


void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  HInstruction* instr = BuildThisFunction();
  return ast_context()->ReturnInstruction(instr, expr->id());
}


// Visits all declarations; global declarations accumulated in globals_
// are flushed in a single HDeclareGlobals instruction afterwards.
void HOptimizedGraphBuilder::VisitDeclarations(
    ZoneList<Declaration*>* declarations) {
  ASSERT(globals_.is_empty());
  AstVisitor::VisitDeclarations(declarations);
  if (!globals_.is_empty()) {
    Handle<FixedArray> array =
        isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
    for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
    int flags = DeclareGlobalsEvalFlag::encode(current_info()->is_eval()) |
        DeclareGlobalsNativeFlag::encode(current_info()->is_native()) |
        DeclareGlobalsStrictMode::encode(current_info()->strict_mode());
    Add<HDeclareGlobals>(array, flags);
    globals_.Rewind(0);
  }
}


// Declares a variable. Globals are recorded as (name, initial value) pairs
// in globals_; parameters/locals/context slots are hole-initialized when
// the binding requires it (let/const); LOOKUP slots bail out.
void HOptimizedGraphBuilder::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_.Add(variable->name(), zone());
      globals_.Add(variable->binding_needs_init()
                       ?
isolate()->factory()->the_hole_value() 11084 : isolate()->factory()->undefined_value(), zone()); 11085 return; 11086 case Variable::PARAMETER: 11087 case Variable::LOCAL: 11088 if (hole_init) { 11089 HValue* value = graph()->GetConstantHole(); 11090 environment()->Bind(variable, value); 11091 } 11092 break; 11093 case Variable::CONTEXT: 11094 if (hole_init) { 11095 HValue* value = graph()->GetConstantHole(); 11096 HValue* context = environment()->context(); 11097 HStoreContextSlot* store = Add<HStoreContextSlot>( 11098 context, variable->index(), HStoreContextSlot::kNoCheck, value); 11099 if (store->HasObservableSideEffects()) { 11100 Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE); 11101 } 11102 } 11103 break; 11104 case Variable::LOOKUP: 11105 return Bailout(kUnsupportedLookupSlotInDeclaration); 11106 } 11107 } 11108 11109 11110 void HOptimizedGraphBuilder::VisitFunctionDeclaration( 11111 FunctionDeclaration* declaration) { 11112 VariableProxy* proxy = declaration->proxy(); 11113 Variable* variable = proxy->var(); 11114 switch (variable->location()) { 11115 case Variable::UNALLOCATED: { 11116 globals_.Add(variable->name(), zone()); 11117 Handle<SharedFunctionInfo> function = Compiler::BuildFunctionInfo( 11118 declaration->fun(), current_info()->script()); 11119 // Check for stack-overflow exception. 
11120 if (function.is_null()) return SetStackOverflow(); 11121 globals_.Add(function, zone()); 11122 return; 11123 } 11124 case Variable::PARAMETER: 11125 case Variable::LOCAL: { 11126 CHECK_ALIVE(VisitForValue(declaration->fun())); 11127 HValue* value = Pop(); 11128 BindIfLive(variable, value); 11129 break; 11130 } 11131 case Variable::CONTEXT: { 11132 CHECK_ALIVE(VisitForValue(declaration->fun())); 11133 HValue* value = Pop(); 11134 HValue* context = environment()->context(); 11135 HStoreContextSlot* store = Add<HStoreContextSlot>( 11136 context, variable->index(), HStoreContextSlot::kNoCheck, value); 11137 if (store->HasObservableSideEffects()) { 11138 Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE); 11139 } 11140 break; 11141 } 11142 case Variable::LOOKUP: 11143 return Bailout(kUnsupportedLookupSlotInDeclaration); 11144 } 11145 } 11146 11147 11148 void HOptimizedGraphBuilder::VisitModuleDeclaration( 11149 ModuleDeclaration* declaration) { 11150 UNREACHABLE(); 11151 } 11152 11153 11154 void HOptimizedGraphBuilder::VisitImportDeclaration( 11155 ImportDeclaration* declaration) { 11156 UNREACHABLE(); 11157 } 11158 11159 11160 void HOptimizedGraphBuilder::VisitExportDeclaration( 11161 ExportDeclaration* declaration) { 11162 UNREACHABLE(); 11163 } 11164 11165 11166 void HOptimizedGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) { 11167 UNREACHABLE(); 11168 } 11169 11170 11171 void HOptimizedGraphBuilder::VisitModuleVariable(ModuleVariable* module) { 11172 UNREACHABLE(); 11173 } 11174 11175 11176 void HOptimizedGraphBuilder::VisitModulePath(ModulePath* module) { 11177 UNREACHABLE(); 11178 } 11179 11180 11181 void HOptimizedGraphBuilder::VisitModuleUrl(ModuleUrl* module) { 11182 UNREACHABLE(); 11183 } 11184 11185 11186 void HOptimizedGraphBuilder::VisitModuleStatement(ModuleStatement* stmt) { 11187 UNREACHABLE(); 11188 } 11189 11190 11191 // Generators for inline runtime functions. 11192 // Support for types. 
11193 void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) { 11194 ASSERT(call->arguments()->length() == 1); 11195 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11196 HValue* value = Pop(); 11197 HIsSmiAndBranch* result = New<HIsSmiAndBranch>(value); 11198 return ast_context()->ReturnControl(result, call->id()); 11199 } 11200 11201 11202 void HOptimizedGraphBuilder::GenerateIsSpecObject(CallRuntime* call) { 11203 ASSERT(call->arguments()->length() == 1); 11204 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11205 HValue* value = Pop(); 11206 HHasInstanceTypeAndBranch* result = 11207 New<HHasInstanceTypeAndBranch>(value, 11208 FIRST_SPEC_OBJECT_TYPE, 11209 LAST_SPEC_OBJECT_TYPE); 11210 return ast_context()->ReturnControl(result, call->id()); 11211 } 11212 11213 11214 void HOptimizedGraphBuilder::GenerateIsFunction(CallRuntime* call) { 11215 ASSERT(call->arguments()->length() == 1); 11216 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11217 HValue* value = Pop(); 11218 HHasInstanceTypeAndBranch* result = 11219 New<HHasInstanceTypeAndBranch>(value, JS_FUNCTION_TYPE); 11220 return ast_context()->ReturnControl(result, call->id()); 11221 } 11222 11223 11224 void HOptimizedGraphBuilder::GenerateIsMinusZero(CallRuntime* call) { 11225 ASSERT(call->arguments()->length() == 1); 11226 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11227 HValue* value = Pop(); 11228 HCompareMinusZeroAndBranch* result = New<HCompareMinusZeroAndBranch>(value); 11229 return ast_context()->ReturnControl(result, call->id()); 11230 } 11231 11232 11233 void HOptimizedGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) { 11234 ASSERT(call->arguments()->length() == 1); 11235 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11236 HValue* value = Pop(); 11237 HHasCachedArrayIndexAndBranch* result = 11238 New<HHasCachedArrayIndexAndBranch>(value); 11239 return ast_context()->ReturnControl(result, call->id()); 11240 } 11241 11242 11243 void 
HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) { 11244 ASSERT(call->arguments()->length() == 1); 11245 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11246 HValue* value = Pop(); 11247 HHasInstanceTypeAndBranch* result = 11248 New<HHasInstanceTypeAndBranch>(value, JS_ARRAY_TYPE); 11249 return ast_context()->ReturnControl(result, call->id()); 11250 } 11251 11252 11253 void HOptimizedGraphBuilder::GenerateIsRegExp(CallRuntime* call) { 11254 ASSERT(call->arguments()->length() == 1); 11255 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11256 HValue* value = Pop(); 11257 HHasInstanceTypeAndBranch* result = 11258 New<HHasInstanceTypeAndBranch>(value, JS_REGEXP_TYPE); 11259 return ast_context()->ReturnControl(result, call->id()); 11260 } 11261 11262 11263 void HOptimizedGraphBuilder::GenerateIsObject(CallRuntime* call) { 11264 ASSERT(call->arguments()->length() == 1); 11265 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11266 HValue* value = Pop(); 11267 HIsObjectAndBranch* result = New<HIsObjectAndBranch>(value); 11268 return ast_context()->ReturnControl(result, call->id()); 11269 } 11270 11271 11272 void HOptimizedGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) { 11273 return Bailout(kInlinedRuntimeFunctionIsNonNegativeSmi); 11274 } 11275 11276 11277 void HOptimizedGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) { 11278 ASSERT(call->arguments()->length() == 1); 11279 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11280 HValue* value = Pop(); 11281 HIsUndetectableAndBranch* result = New<HIsUndetectableAndBranch>(value); 11282 return ast_context()->ReturnControl(result, call->id()); 11283 } 11284 11285 11286 void HOptimizedGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf( 11287 CallRuntime* call) { 11288 return Bailout(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf); 11289 } 11290 11291 11292 // Support for construct call checks. 
// %_IsConstructCall(): whether the current frame is a construct call.
void HOptimizedGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 0);
  if (function_state()->outer() != NULL) {
    // We are generating graph for inlined function.
    // The answer is known statically from the inlining kind.
    HValue* value = function_state()->inlining_kind() == CONSTRUCT_CALL_RETURN
        ? graph()->GetConstantTrue()
        : graph()->GetConstantFalse();
    return ast_context()->ReturnValue(value);
  } else {
    return ast_context()->ReturnControl(New<HIsConstructCallAndBranch>(),
                                        call->id());
  }
}


// Support for arguments.length and arguments[?].
void HOptimizedGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
  // Our implementation of arguments (based on this stack frame or an
  // adapter below it) does not work for inlined functions.  This runtime
  // function is blacklisted by AstNode::IsInlineable.
  ASSERT(function_state()->outer() == NULL);
  ASSERT(call->arguments()->length() == 0);
  HInstruction* elements = Add<HArgumentsElements>(false);
  HArgumentsLength* result = New<HArgumentsLength>(elements);
  return ast_context()->ReturnInstruction(result, call->id());
}


// %_Arguments(index): bounds-checked access to the actual arguments.
void HOptimizedGraphBuilder::GenerateArguments(CallRuntime* call) {
  // Our implementation of arguments (based on this stack frame or an
  // adapter below it) does not work for inlined functions.  This runtime
  // function is blacklisted by AstNode::IsInlineable.
  ASSERT(function_state()->outer() == NULL);
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* index = Pop();
  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* length = Add<HArgumentsLength>(elements);
  HInstruction* checked_index = Add<HBoundsCheck>(index, length);
  HAccessArgumentsAt* result = New<HAccessArgumentsAt>(
      elements, length, checked_index);
  return ast_context()->ReturnInstruction(result, call->id());
}


// Support for accessing the class and value fields of an object.
void HOptimizedGraphBuilder::GenerateClassOf(CallRuntime* call) {
  // The special form detected by IsClassOfTest is detected before we get here
  // and does not cause a bailout.
  return Bailout(kInlinedRuntimeFunctionClassOf);
}


// %_ValueOf(obj): unwrap a JSValue; non-wrappers are returned unchanged.
void HOptimizedGraphBuilder::GenerateValueOf(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* object = Pop();

  IfBuilder if_objectisvalue(this);
  HValue* objectisvalue = if_objectisvalue.If<HHasInstanceTypeAndBranch>(
      object, JS_VALUE_TYPE);
  if_objectisvalue.Then();
  {
    // Return the actual value.
    Push(Add<HLoadNamedField>(
            object, objectisvalue,
            HObjectAccess::ForObservableJSObjectOffset(
                JSValue::kValueOffset)));
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_objectisvalue.Else();
  {
    // If the object is not a value return the object.
    Push(object);
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_objectisvalue.End();
  return ast_context()->ReturnValue(Pop());
}


// %_DateField(date, index): load a field of a JSDate. The index must be a
// compile-time Smi literal.
void HOptimizedGraphBuilder::GenerateDateField(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 2);
  ASSERT_NE(NULL, call->arguments()->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->value()));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* date = Pop();
  HDateField* result = New<HDateField>(date, index);
  return ast_context()->ReturnInstruction(result, call->id());
}


// %_OneByteSeqStringSetChar(string, index, value): write a char into a
// sequential one-byte string; returns undefined.
void HOptimizedGraphBuilder::GenerateOneByteSeqStringSetChar(
    CallRuntime* call) {
  ASSERT(call->arguments()->length() == 3);
  // We need to follow the evaluation order of full codegen.
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* string = Pop();
  HValue* value = Pop();
  HValue* index = Pop();
  Add<HSeqStringSetChar>(String::ONE_BYTE_ENCODING, string,
                         index, value);
  Add<HSimulate>(call->id(), FIXED_SIMULATE);
  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
}


// %_TwoByteSeqStringSetChar(string, index, value): two-byte variant of the
// generator above; returns undefined.
void HOptimizedGraphBuilder::GenerateTwoByteSeqStringSetChar(
    CallRuntime* call) {
  ASSERT(call->arguments()->length() == 3);
  // We need to follow the evaluation order of full codegen.
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* string = Pop();
  HValue* value = Pop();
  HValue* index = Pop();
  Add<HSeqStringSetChar>(String::TWO_BYTE_ENCODING, string,
                         index, value);
  Add<HSimulate>(call->id(), FIXED_SIMULATE);
  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
}


// %_SetValueOf(obj, value): store into a JSValue wrapper if obj is one;
// always returns value.
void HOptimizedGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 2);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  HValue* value = Pop();
  HValue* object = Pop();

  // Check if object is a JSValue.
  IfBuilder if_objectisvalue(this);
  if_objectisvalue.If<HHasInstanceTypeAndBranch>(object, JS_VALUE_TYPE);
  if_objectisvalue.Then();
  {
    // Create in-object property store to kValueOffset.
    Add<HStoreNamedField>(object,
        HObjectAccess::ForObservableJSObjectOffset(JSValue::kValueOffset),
        value);
    if (!ast_context()->IsEffect()) {
      Push(value);
    }
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_objectisvalue.Else();
  {
    // Nothing to do in this case.
    if (!ast_context()->IsEffect()) {
      Push(value);
    }
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_objectisvalue.End();
  if (!ast_context()->IsEffect()) {
    Drop(1);
  }
  return ast_context()->ReturnValue(value);
}


// Fast support for charCodeAt(n).
11457 void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) { 11458 ASSERT(call->arguments()->length() == 2); 11459 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11460 CHECK_ALIVE(VisitForValue(call->arguments()->at(1))); 11461 HValue* index = Pop(); 11462 HValue* string = Pop(); 11463 HInstruction* result = BuildStringCharCodeAt(string, index); 11464 return ast_context()->ReturnInstruction(result, call->id()); 11465 } 11466 11467 11468 // Fast support for string.charAt(n) and string[n]. 11469 void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) { 11470 ASSERT(call->arguments()->length() == 1); 11471 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11472 HValue* char_code = Pop(); 11473 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code); 11474 return ast_context()->ReturnInstruction(result, call->id()); 11475 } 11476 11477 11478 // Fast support for string.charAt(n) and string[n]. 11479 void HOptimizedGraphBuilder::GenerateStringCharAt(CallRuntime* call) { 11480 ASSERT(call->arguments()->length() == 2); 11481 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11482 CHECK_ALIVE(VisitForValue(call->arguments()->at(1))); 11483 HValue* index = Pop(); 11484 HValue* string = Pop(); 11485 HInstruction* char_code = BuildStringCharCodeAt(string, index); 11486 AddInstruction(char_code); 11487 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code); 11488 return ast_context()->ReturnInstruction(result, call->id()); 11489 } 11490 11491 11492 // Fast support for object equality testing. 
11493 void HOptimizedGraphBuilder::GenerateObjectEquals(CallRuntime* call) { 11494 ASSERT(call->arguments()->length() == 2); 11495 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11496 CHECK_ALIVE(VisitForValue(call->arguments()->at(1))); 11497 HValue* right = Pop(); 11498 HValue* left = Pop(); 11499 HCompareObjectEqAndBranch* result = 11500 New<HCompareObjectEqAndBranch>(left, right); 11501 return ast_context()->ReturnControl(result, call->id()); 11502 } 11503 11504 11505 // Fast support for StringAdd. 11506 void HOptimizedGraphBuilder::GenerateStringAdd(CallRuntime* call) { 11507 ASSERT_EQ(2, call->arguments()->length()); 11508 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11509 CHECK_ALIVE(VisitForValue(call->arguments()->at(1))); 11510 HValue* right = Pop(); 11511 HValue* left = Pop(); 11512 HInstruction* result = NewUncasted<HStringAdd>(left, right); 11513 return ast_context()->ReturnInstruction(result, call->id()); 11514 } 11515 11516 11517 // Fast support for SubString. 11518 void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) { 11519 ASSERT_EQ(3, call->arguments()->length()); 11520 CHECK_ALIVE(VisitExpressions(call->arguments())); 11521 PushArgumentsFromEnvironment(call->arguments()->length()); 11522 HCallStub* result = New<HCallStub>(CodeStub::SubString, 3); 11523 return ast_context()->ReturnInstruction(result, call->id()); 11524 } 11525 11526 11527 // Fast support for StringCompare. 11528 void HOptimizedGraphBuilder::GenerateStringCompare(CallRuntime* call) { 11529 ASSERT_EQ(2, call->arguments()->length()); 11530 CHECK_ALIVE(VisitExpressions(call->arguments())); 11531 PushArgumentsFromEnvironment(call->arguments()->length()); 11532 HCallStub* result = New<HCallStub>(CodeStub::StringCompare, 2); 11533 return ast_context()->ReturnInstruction(result, call->id()); 11534 } 11535 11536 11537 // Support for direct calls from JavaScript to native RegExp code. 
11538 void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) { 11539 ASSERT_EQ(4, call->arguments()->length()); 11540 CHECK_ALIVE(VisitExpressions(call->arguments())); 11541 PushArgumentsFromEnvironment(call->arguments()->length()); 11542 HCallStub* result = New<HCallStub>(CodeStub::RegExpExec, 4); 11543 return ast_context()->ReturnInstruction(result, call->id()); 11544 } 11545 11546 11547 void HOptimizedGraphBuilder::GenerateDoubleLo(CallRuntime* call) { 11548 ASSERT_EQ(1, call->arguments()->length()); 11549 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11550 HValue* value = Pop(); 11551 HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::LOW); 11552 return ast_context()->ReturnInstruction(result, call->id()); 11553 } 11554 11555 11556 void HOptimizedGraphBuilder::GenerateDoubleHi(CallRuntime* call) { 11557 ASSERT_EQ(1, call->arguments()->length()); 11558 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11559 HValue* value = Pop(); 11560 HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::HIGH); 11561 return ast_context()->ReturnInstruction(result, call->id()); 11562 } 11563 11564 11565 void HOptimizedGraphBuilder::GenerateConstructDouble(CallRuntime* call) { 11566 ASSERT_EQ(2, call->arguments()->length()); 11567 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11568 CHECK_ALIVE(VisitForValue(call->arguments()->at(1))); 11569 HValue* lo = Pop(); 11570 HValue* hi = Pop(); 11571 HInstruction* result = NewUncasted<HConstructDouble>(hi, lo); 11572 return ast_context()->ReturnInstruction(result, call->id()); 11573 } 11574 11575 11576 // Construct a RegExp exec result with two in-object properties. 
// %_RegExpConstructResult(length, index, input): build the JSArray returned
// by RegExp.prototype.exec.
void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
  ASSERT_EQ(3, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
  HValue* input = Pop();
  HValue* index = Pop();
  HValue* length = Pop();
  HValue* result = BuildRegExpConstructResult(length, index, input);
  return ast_context()->ReturnValue(result);
}


// Support for fast native caches.
void HOptimizedGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionGetFromCache);
}


// Fast support for number to string.
void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
  ASSERT_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* number = Pop();
  HValue* result = BuildNumberToString(number, Type::Any(zone()));
  return ast_context()->ReturnValue(result);
}


// Fast call for custom callbacks: dispatch to HInvokeFunction when the
// callee is a JSFunction, otherwise to the generic HCallFunction.
void HOptimizedGraphBuilder::GenerateCallFunction(CallRuntime* call) {
  // 1 ~ The function to call is not itself an argument to the call.
  int arg_count = call->arguments()->length() - 1;
  ASSERT(arg_count >= 1);  // There's always at least a receiver.

  CHECK_ALIVE(VisitExpressions(call->arguments()));
  // The function is the last argument
  HValue* function = Pop();
  // Push the arguments to the stack
  PushArgumentsFromEnvironment(arg_count);

  IfBuilder if_is_jsfunction(this);
  if_is_jsfunction.If<HHasInstanceTypeAndBranch>(function, JS_FUNCTION_TYPE);

  if_is_jsfunction.Then();
  {
    HInstruction* invoke_result =
        Add<HInvokeFunction>(function, arg_count);
    if (!ast_context()->IsEffect()) {
      Push(invoke_result);
    }
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }

  if_is_jsfunction.Else();
  {
    HInstruction* call_result =
        Add<HCallFunction>(function, arg_count);
    if (!ast_context()->IsEffect()) {
      Push(call_result);
    }
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_is_jsfunction.End();

  if (ast_context()->IsEffect()) {
    // EffectContext::ReturnValue ignores the value, so we can just pass
    // 'undefined' (as we do not have the call result anymore).
    return ast_context()->ReturnValue(graph()->GetConstantUndefined());
  } else {
    return ast_context()->ReturnValue(Pop());
  }
}


// Fast call to math functions.
11653 void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) { 11654 ASSERT_EQ(2, call->arguments()->length()); 11655 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11656 CHECK_ALIVE(VisitForValue(call->arguments()->at(1))); 11657 HValue* right = Pop(); 11658 HValue* left = Pop(); 11659 HInstruction* result = NewUncasted<HPower>(left, right); 11660 return ast_context()->ReturnInstruction(result, call->id()); 11661 } 11662 11663 11664 void HOptimizedGraphBuilder::GenerateMathLogRT(CallRuntime* call) { 11665 ASSERT(call->arguments()->length() == 1); 11666 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11667 HValue* value = Pop(); 11668 HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathLog); 11669 return ast_context()->ReturnInstruction(result, call->id()); 11670 } 11671 11672 11673 void HOptimizedGraphBuilder::GenerateMathSqrtRT(CallRuntime* call) { 11674 ASSERT(call->arguments()->length() == 1); 11675 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11676 HValue* value = Pop(); 11677 HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathSqrt); 11678 return ast_context()->ReturnInstruction(result, call->id()); 11679 } 11680 11681 11682 void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) { 11683 ASSERT(call->arguments()->length() == 1); 11684 CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); 11685 HValue* value = Pop(); 11686 HGetCachedArrayIndex* result = New<HGetCachedArrayIndex>(value); 11687 return ast_context()->ReturnInstruction(result, call->id()); 11688 } 11689 11690 11691 void HOptimizedGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) { 11692 return Bailout(kInlinedRuntimeFunctionFastAsciiArrayJoin); 11693 } 11694 11695 11696 // Support for generators. 
11697 void HOptimizedGraphBuilder::GenerateGeneratorNext(CallRuntime* call) { 11698 return Bailout(kInlinedRuntimeFunctionGeneratorNext); 11699 } 11700 11701 11702 void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) { 11703 return Bailout(kInlinedRuntimeFunctionGeneratorThrow); 11704 } 11705 11706 11707 void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode( 11708 CallRuntime* call) { 11709 Add<HDebugBreak>(); 11710 return ast_context()->ReturnValue(graph()->GetConstant0()); 11711 } 11712 11713 11714 void HOptimizedGraphBuilder::GenerateDebugCallbackSupportsStepping( 11715 CallRuntime* call) { 11716 ASSERT(call->arguments()->length() == 1); 11717 // Debugging is not supported in optimized code. 11718 return ast_context()->ReturnValue(graph()->GetConstantFalse()); 11719 } 11720 11721 11722 #undef CHECK_BAILOUT 11723 #undef CHECK_ALIVE 11724 11725 11726 HEnvironment::HEnvironment(HEnvironment* outer, 11727 Scope* scope, 11728 Handle<JSFunction> closure, 11729 Zone* zone) 11730 : closure_(closure), 11731 values_(0, zone), 11732 frame_type_(JS_FUNCTION), 11733 parameter_count_(0), 11734 specials_count_(1), 11735 local_count_(0), 11736 outer_(outer), 11737 entry_(NULL), 11738 pop_count_(0), 11739 push_count_(0), 11740 ast_id_(BailoutId::None()), 11741 zone_(zone) { 11742 Scope* declaration_scope = scope->DeclarationScope(); 11743 Initialize(declaration_scope->num_parameters() + 1, 11744 declaration_scope->num_stack_slots(), 0); 11745 } 11746 11747 11748 HEnvironment::HEnvironment(Zone* zone, int parameter_count) 11749 : values_(0, zone), 11750 frame_type_(STUB), 11751 parameter_count_(parameter_count), 11752 specials_count_(1), 11753 local_count_(0), 11754 outer_(NULL), 11755 entry_(NULL), 11756 pop_count_(0), 11757 push_count_(0), 11758 ast_id_(BailoutId::None()), 11759 zone_(zone) { 11760 Initialize(parameter_count, 0, 0); 11761 } 11762 11763 11764 HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone) 11765 : values_(0, zone), 
11766 frame_type_(JS_FUNCTION), 11767 parameter_count_(0), 11768 specials_count_(0), 11769 local_count_(0), 11770 outer_(NULL), 11771 entry_(NULL), 11772 pop_count_(0), 11773 push_count_(0), 11774 ast_id_(other->ast_id()), 11775 zone_(zone) { 11776 Initialize(other); 11777 } 11778 11779 11780 HEnvironment::HEnvironment(HEnvironment* outer, 11781 Handle<JSFunction> closure, 11782 FrameType frame_type, 11783 int arguments, 11784 Zone* zone) 11785 : closure_(closure), 11786 values_(arguments, zone), 11787 frame_type_(frame_type), 11788 parameter_count_(arguments), 11789 specials_count_(0), 11790 local_count_(0), 11791 outer_(outer), 11792 entry_(NULL), 11793 pop_count_(0), 11794 push_count_(0), 11795 ast_id_(BailoutId::None()), 11796 zone_(zone) { 11797 } 11798 11799 11800 void HEnvironment::Initialize(int parameter_count, 11801 int local_count, 11802 int stack_height) { 11803 parameter_count_ = parameter_count; 11804 local_count_ = local_count; 11805 11806 // Avoid reallocating the temporaries' backing store on the first Push. 11807 int total = parameter_count + specials_count_ + local_count + stack_height; 11808 values_.Initialize(total + 4, zone()); 11809 for (int i = 0; i < total; ++i) values_.Add(NULL, zone()); 11810 } 11811 11812 11813 void HEnvironment::Initialize(const HEnvironment* other) { 11814 closure_ = other->closure(); 11815 values_.AddAll(other->values_, zone()); 11816 assigned_variables_.Union(other->assigned_variables_, zone()); 11817 frame_type_ = other->frame_type_; 11818 parameter_count_ = other->parameter_count_; 11819 local_count_ = other->local_count_; 11820 if (other->outer_ != NULL) outer_ = other->outer_->Copy(); // Deep copy. 
11821 entry_ = other->entry_; 11822 pop_count_ = other->pop_count_; 11823 push_count_ = other->push_count_; 11824 specials_count_ = other->specials_count_; 11825 ast_id_ = other->ast_id_; 11826 } 11827 11828 11829 void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) { 11830 ASSERT(!block->IsLoopHeader()); 11831 ASSERT(values_.length() == other->values_.length()); 11832 11833 int length = values_.length(); 11834 for (int i = 0; i < length; ++i) { 11835 HValue* value = values_[i]; 11836 if (value != NULL && value->IsPhi() && value->block() == block) { 11837 // There is already a phi for the i'th value. 11838 HPhi* phi = HPhi::cast(value); 11839 // Assert index is correct and that we haven't missed an incoming edge. 11840 ASSERT(phi->merged_index() == i || !phi->HasMergedIndex()); 11841 ASSERT(phi->OperandCount() == block->predecessors()->length()); 11842 phi->AddInput(other->values_[i]); 11843 } else if (values_[i] != other->values_[i]) { 11844 // There is a fresh value on the incoming edge, a phi is needed. 
11845 ASSERT(values_[i] != NULL && other->values_[i] != NULL); 11846 HPhi* phi = block->AddNewPhi(i); 11847 HValue* old_value = values_[i]; 11848 for (int j = 0; j < block->predecessors()->length(); j++) { 11849 phi->AddInput(old_value); 11850 } 11851 phi->AddInput(other->values_[i]); 11852 this->values_[i] = phi; 11853 } 11854 } 11855 } 11856 11857 11858 void HEnvironment::Bind(int index, HValue* value) { 11859 ASSERT(value != NULL); 11860 assigned_variables_.Add(index, zone()); 11861 values_[index] = value; 11862 } 11863 11864 11865 bool HEnvironment::HasExpressionAt(int index) const { 11866 return index >= parameter_count_ + specials_count_ + local_count_; 11867 } 11868 11869 11870 bool HEnvironment::ExpressionStackIsEmpty() const { 11871 ASSERT(length() >= first_expression_index()); 11872 return length() == first_expression_index(); 11873 } 11874 11875 11876 void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) { 11877 int count = index_from_top + 1; 11878 int index = values_.length() - count; 11879 ASSERT(HasExpressionAt(index)); 11880 // The push count must include at least the element in question or else 11881 // the new value will not be included in this environment's history. 11882 if (push_count_ < count) { 11883 // This is the same effect as popping then re-pushing 'count' elements. 
    pop_count_ += (count - push_count_);
    push_count_ = count;
  }
  values_[index] = value;
}


// Pops |count| values off this environment's expression stack.
void HEnvironment::Drop(int count) {
  for (int i = 0; i < count; ++i) {
    Pop();
  }
}


// Returns a full copy of this environment, allocated in the same zone.
HEnvironment* HEnvironment::Copy() const {
  return new(zone()) HEnvironment(this, zone());
}


// Returns a copy whose simulation history has been reset via ClearHistory().
HEnvironment* HEnvironment::CopyWithoutHistory() const {
  HEnvironment* result = Copy();
  result->ClearHistory();
  return result;
}


// Returns a copy in which every value slot is replaced by a fresh phi in
// |loop_header|; each phi gets the current value as its first input, so loop
// back edges can later add their inputs.
HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
  HEnvironment* new_env = Copy();
  for (int i = 0; i < values_.length(); ++i) {
    HPhi* phi = loop_header->AddNewPhi(i);
    phi->AddInput(values_[i]);
    new_env->values_[i] = phi;
  }
  new_env->ClearHistory();
  return new_env;
}


// Builds an artificial stub frame environment of kind |frame_type| on top of
// |outer|, populated with the receiver plus |arguments| values taken from
// this environment's expression stack (pushed in bottom-up order).
HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
                                                  Handle<JSFunction> target,
                                                  FrameType frame_type,
                                                  int arguments) const {
  HEnvironment* new_env =
      new(zone()) HEnvironment(outer, target, frame_type,
                               arguments + 1, zone());
  for (int i = 0; i <= arguments; ++i) {  // Include receiver.
    new_env->Push(ExpressionStackAt(arguments - i));
  }
  new_env->ClearHistory();
  return new_env;
}


// Builds the environment for inlining |function| (called as |inlining_kind|
// with |arguments| actual arguments) into the current JS frame.  Inserts
// artificial stub frames (constructor/getter/setter and, on arity mismatch,
// an arguments adaptor) between the caller and the inlined function.
HEnvironment* HEnvironment::CopyForInlining(
    Handle<JSFunction> target,
    int arguments,
    FunctionLiteral* function,
    HConstant* undefined,
    InliningKind inlining_kind) const {
  ASSERT(frame_type() == JS_FUNCTION);

  // Outer environment is a copy of this one without the arguments.
  int arity = function->scope()->num_parameters();

  HEnvironment* outer = Copy();
  outer->Drop(arguments + 1);  // Including receiver.
  outer->ClearHistory();

  if (inlining_kind == CONSTRUCT_CALL_RETURN) {
    // Create artificial constructor stub environment.  The receiver should
    // actually be the constructor function, but we pass the newly allocated
    // object instead, DoComputeConstructStubFrame() relies on that.
    outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
  } else if (inlining_kind == GETTER_CALL_RETURN) {
    // We need an additional StackFrame::INTERNAL frame for restoring the
    // correct context.
    outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
  } else if (inlining_kind == SETTER_CALL_RETURN) {
    // We need an additional StackFrame::INTERNAL frame for temporarily saving
    // the argument of the setter, see StoreStubCompiler::CompileStoreViaSetter.
    outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
  }

  if (arity != arguments) {
    // Create artificial arguments adaptation environment.
    outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
  }

  HEnvironment* inner =
      new(zone()) HEnvironment(outer, function->scope(), target, zone());
  // Get the argument values from the original environment.
  for (int i = 0; i <= arity; ++i) {  // Include receiver.
    // Missing actual arguments (arity > arguments) are filled with undefined.
    HValue* push = (i <= arguments) ?
        ExpressionStackAt(arguments - i) : undefined;
    inner->SetValueAt(i, push);
  }
  inner->SetValueAt(arity + 1, context());
  // Remaining slots (locals etc.) start out as undefined.
  for (int i = arity + 2; i < inner->length(); ++i) {
    inner->SetValueAt(i, undefined);
  }

  inner->set_ast_id(BailoutId::FunctionEntry());
  return inner;
}


// Prints all value slots to |stream|, grouped by section (parameters,
// specials, locals, expressions) with one "index: name" line per slot.
void HEnvironment::PrintTo(StringStream* stream) {
  for (int i = 0; i < length(); i++) {
    if (i == 0) stream->Add("parameters\n");
    if (i == parameter_count()) stream->Add("specials\n");
    if (i == parameter_count() + specials_count()) stream->Add("locals\n");
    if (i == parameter_count() + specials_count() + local_count()) {
      stream->Add("expressions\n");
    }
    HValue* val = values_.at(i);
    stream->Add("%d: ", i);
    if (val != NULL) {
      val->PrintNameTo(stream);
    } else {
      stream->Add("NULL");
    }
    stream->Add("\n");
  }
  PrintF("\n");
}


// Debug helper: renders PrintTo() output into a heap string and dumps it
// to stdout via PrintF.
void HEnvironment::PrintToStd() {
  HeapStringAllocator string_allocator;
  StringStream trace(&string_allocator);
  PrintTo(&trace);
  PrintF("%s", trace.ToCString().get());
}


// Emits the "compilation" header of the trace: function name and
// optimization id for optimized code, or the stub's major key name for stubs,
// plus a timestamp.
void HTracer::TraceCompilation(CompilationInfo* info) {
  Tag tag(this, "compilation");
  if (info->IsOptimizing()) {
    Handle<String> name = info->function()->debug_name();
    PrintStringProperty("name", name->ToCString().get());
    PrintIndent();
    trace_.Add("method \"%s:%d\"\n",
               name->ToCString().get(),
               info->optimization_id());
  } else {
    CodeStub::Major major_key = info->code_stub()->MajorKey();
    PrintStringProperty("name", CodeStub::MajorName(major_key, false));
    PrintStringProperty("method", "stub");
  }
  PrintLongProperty("date", static_cast<int64_t>(OS::TimeCurrentMillis()));
}


// Traces |chunk|'s graph together with its lithium instructions.
void HTracer::TraceLithium(const char* name, LChunk* chunk) {
  // Tracing dereferences handles, which is only safe when no concurrent
  // recompilation is running.
  ASSERT(!chunk->isolate()->concurrent_recompilation_enabled());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  Trace(name, chunk->graph(), chunk);
}


// Traces |graph| only (no lithium chunk attached).
void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
  ASSERT(!graph->isolate()->concurrent_recompilation_enabled());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  Trace(name, graph, NULL);
}


// Emits one "cfg" section for |graph|: per basic block its predecessors,
// successors, flags, dominator, loop depth, phis and HIR instructions, and --
// when |chunk| is non-NULL -- the corresponding LIR instructions.
void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
  Tag tag(this, "cfg");
  PrintStringProperty("name", name);
  const ZoneList<HBasicBlock*>* blocks = graph->blocks();
  for (int i = 0; i < blocks->length(); i++) {
    HBasicBlock* current = blocks->at(i);
    Tag block_tag(this, "block");
    PrintBlockProperty("name", current->block_id());
    // Bytecode indices are not tracked for hydrogen; emit the format's
    // placeholder value.
    PrintIntProperty("from_bci", -1);
    PrintIntProperty("to_bci", -1);

    if (!current->predecessors()->is_empty()) {
      PrintIndent();
      trace_.Add("predecessors");
      for (int j = 0; j < current->predecessors()->length(); ++j) {
        trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
      }
      trace_.Add("\n");
    } else {
      PrintEmptyProperty("predecessors");
    }

    if (current->end()->SuccessorCount() == 0) {
      PrintEmptyProperty("successors");
    } else {
      PrintIndent();
      trace_.Add("successors");
      for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
        trace_.Add(" \"B%d\"", it.Current()->block_id());
      }
      trace_.Add("\n");
    }

    // No exception handlers in hydrogen; the property is always empty.
    PrintEmptyProperty("xhandlers");

    {
      PrintIndent();
      trace_.Add("flags");
      if (current->IsLoopSuccessorDominator()) {
        trace_.Add(" \"dom-loop-succ\"");
      }
      if (current->IsUnreachable()) {
        trace_.Add(" \"dead\"");
      }
      if (current->is_osr_entry()) {
        trace_.Add(" \"osr\"");
      }
      trace_.Add("\n");
    }

    if (current->dominator() != NULL) {
      PrintBlockProperty("dominator", current->dominator()->block_id());
    }

    PrintIntProperty("loop_depth", current->LoopNestingDepth());

    if (chunk != NULL) {
      // Map the block's instruction index range to lifetime positions used
      // by the register allocator output.
      int first_index = current->first_instruction_index();
      int last_index = current->last_instruction_index();
      PrintIntProperty(
          "first_lir_id",
          LifetimePosition::FromInstructionIndex(first_index).Value());
      PrintIntProperty(
          "last_lir_id",
          LifetimePosition::FromInstructionIndex(last_index).Value());
    }

    {
      // The "states/locals" section lists this block's phis.
      Tag states_tag(this, "states");
      Tag locals_tag(this, "locals");
      int total = current->phis()->length();
      PrintIntProperty("size", current->phis()->length());
      PrintStringProperty("method", "None");
      for (int j = 0; j < total; ++j) {
        HPhi* phi = current->phis()->at(j);
        PrintIndent();
        trace_.Add("%d ", phi->merged_index());
        phi->PrintNameTo(&trace_);
        trace_.Add(" ");
        phi->PrintTo(&trace_);
        trace_.Add("\n");
      }
    }

    {
      Tag HIR_tag(this, "HIR");
      for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
        HInstruction* instruction = it.Current();
        int uses = instruction->UseCount();
        PrintIndent();
        trace_.Add("0 %d ", uses);
        instruction->PrintNameTo(&trace_);
        trace_.Add(" ");
        instruction->PrintTo(&trace_);
        if (FLAG_hydrogen_track_positions &&
            instruction->has_position() &&
            instruction->position().raw() != 0) {
          const HSourcePosition pos = instruction->position();
          trace_.Add(" pos:");
          if (pos.inlining_id() != 0) {
            // Positions inside inlined functions are prefixed with the
            // inlining id.
            trace_.Add("%d_", pos.inlining_id());
          }
          trace_.Add("%d", pos.position());
        }
        trace_.Add(" <|@\n");
      }
    }


    if (chunk != NULL) {
      Tag LIR_tag(this, "LIR");
      int first_index = current->first_instruction_index();
      int last_index = current->last_instruction_index();
      if (first_index != -1 && last_index != -1) {
        const ZoneList<LInstruction*>* instructions = chunk->instructions();
        for (int i = first_index; i <= last_index; ++i) {
          LInstruction* linstr = instructions->at(i);
          // Gap/deleted slots are NULL and skipped.
          if (linstr != NULL) {
            PrintIndent();
            trace_.Add("%d ",
                       LifetimePosition::FromInstructionIndex(i).Value());
            linstr->PrintTo(&trace_);
            trace_.Add(" [hir:");
            linstr->hydrogen_value()->PrintNameTo(&trace_);
            trace_.Add("]");
            trace_.Add(" <|@\n");
          }
        }
      }
    }
  }
}


// Emits an "intervals" section covering the allocator's fixed double ranges,
// fixed general ranges, and the virtual-register ("object") live ranges.
void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
  Tag tag(this, "intervals");
  PrintStringProperty("name", name);

  const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
  for (int i = 0; i < fixed_d->length(); ++i) {
    TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
  }

  const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
  for (int i = 0; i < fixed->length(); ++i) {
    TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
  }

  const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
  for (int i = 0; i < live_ranges->length(); ++i) {
    TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
  }
}


// Emits one line for |range|: id, type, assigned register or spill slot,
// parent/hint ids, covered use intervals, and register-beneficial use
// positions.  Empty or NULL ranges are skipped.
void HTracer::TraceLiveRange(LiveRange* range, const char* type,
                             Zone* zone) {
  if (range != NULL && !range->IsEmpty()) {
    PrintIndent();
    trace_.Add("%d %s", range->id(), type);
    if (range->HasRegisterAssigned()) {
      LOperand* op = range->CreateAssignedOperand(zone);
      int assigned_reg = op->index();
      if (op->IsDoubleRegister()) {
        trace_.Add(" \"%s\"",
                   DoubleRegister::AllocationIndexToString(assigned_reg));
      } else {
        ASSERT(op->IsRegister());
        trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg));
      }
    } else if (range->IsSpilled()) {
      LOperand* op = range->TopLevel()->GetSpillOperand();
      if (op->IsDoubleStackSlot()) {
        trace_.Add(" \"double_stack:%d\"", op->index());
      } else {
        ASSERT(op->IsStackSlot());
        trace_.Add(" \"stack:%d\"", op->index());
      }
    }
    // A child range reports its parent's id; a top-level range reports its
    // own id.
    int parent_index = -1;
    if (range->IsChild()) {
      parent_index = range->parent()->id();
    } else {
      parent_index = range->id();
    }
    LOperand* op = range->FirstHint();
    int hint_index = -1;
    if (op != NULL && op->IsUnallocated()) {
      hint_index = LUnallocated::cast(op)->virtual_register();
    }
    trace_.Add(" %d %d", parent_index, hint_index);
    // Intervals are printed half-open: [start, end[.
    UseInterval* cur_interval = range->first_interval();
    while (cur_interval != NULL && range->Covers(cur_interval->start())) {
      trace_.Add(" [%d, %d[",
                 cur_interval->start().Value(),
                 cur_interval->end().Value());
      cur_interval = cur_interval->next();
    }

    UsePosition* current_pos = range->first_pos();
    while (current_pos != NULL) {
      if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
        trace_.Add(" %d M", current_pos->pos().Value());
      }
      current_pos = current_pos->next();
    }

    trace_.Add(" \"\"\n");
  }
}


// Appends the accumulated trace buffer to the trace file and resets the
// buffer for the next flush.
void HTracer::FlushToFile() {
  AppendChars(filename_.start(), trace_.ToCString().get(), trace_.length(),
              false);
  trace_.Reset();
}


// Accumulates the source size of the function being compiled; a null
// shared-function-info (e.g. stubs) contributes nothing.
void HStatistics::Initialize(CompilationInfo* info) {
  if (info->shared_info().is_null()) return;
  source_size_ += info->shared_info()->SourceSize();
}


// Dumps per-phase timing/size statistics, the create/optimize/codegen
// totals, and per-kB-of-source averages to stdout.
void HStatistics::Print() {
  PrintF("Timing results:\n");
  TimeDelta sum;
  for (int i = 0; i < times_.length(); ++i) {
    sum += times_[i];
  }

  for (int i
       = 0; i < names_.length(); ++i) {
    PrintF("%32s", names_[i]);
    double ms = times_[i].InMillisecondsF();
    double percent = times_[i].PercentOf(sum);
    PrintF(" %8.3f ms / %4.1f %% ", ms, percent);

    unsigned size = sizes_[i];
    double size_percent = static_cast<double>(size) * 100 / total_size_;
    PrintF(" %9u bytes / %4.1f %%\n", size, size_percent);
  }

  PrintF("----------------------------------------"
         "---------------------------------------\n");
  TimeDelta total = create_graph_ + optimize_graph_ + generate_code_;
  PrintF("%32s %8.3f ms / %4.1f %% \n",
         "Create graph",
         create_graph_.InMillisecondsF(),
         create_graph_.PercentOf(total));
  PrintF("%32s %8.3f ms / %4.1f %% \n",
         "Optimize graph",
         optimize_graph_.InMillisecondsF(),
         optimize_graph_.PercentOf(total));
  PrintF("%32s %8.3f ms / %4.1f %% \n",
         "Generate and install code",
         generate_code_.InMillisecondsF(),
         generate_code_.PercentOf(total));
  PrintF("----------------------------------------"
         "---------------------------------------\n");
  PrintF("%32s %8.3f ms (%.1f times slower than full code gen)\n",
         "Total",
         total.InMillisecondsF(),
         total.TimesOf(full_code_gen_));

  // Guard against division by zero when no source was recorded.
  double source_size_in_kb = static_cast<double>(source_size_) / 1024;
  double normalized_time = source_size_in_kb > 0
      ? total.InMillisecondsF() / source_size_in_kb
      : 0;
  double normalized_size_in_kb = source_size_in_kb > 0
      ?
      total_size_ / 1024 / source_size_in_kb
      : 0;
  PrintF("%32s %8.3f ms %7.3f kB allocated\n",
         "Average per kB source",
         normalized_time, normalized_size_in_kb);
}


// Records |time| and |size| under the phase |name|, merging with an
// existing entry (matched by strcmp) or appending a new one.
void HStatistics::SaveTiming(const char* name, TimeDelta time, unsigned size) {
  total_size_ += size;
  for (int i = 0; i < names_.length(); ++i) {
    if (strcmp(names_[i], name) == 0) {
      times_[i] += time;
      sizes_[i] += size;
      return;
    }
  }
  names_.Add(name);
  times_.Add(time);
  sizes_.Add(size);
}


// On phase exit, emit the hydrogen trace (if tracing is enabled) and, in
// debug builds, verify the graph.
HPhase::~HPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
  }

#ifdef DEBUG
  graph_->Verify(false);  // No full verify.
#endif
}

} }  // namespace v8::internal