1 // Copyright 2013 the V8 project authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style license that can be 3 // found in the LICENSE file. 4 5 #include "src/crankshaft/hydrogen.h" 6 7 #include <sstream> 8 9 #include "src/allocation-site-scopes.h" 10 #include "src/ast/ast-numbering.h" 11 #include "src/ast/scopeinfo.h" 12 #include "src/code-factory.h" 13 #include "src/crankshaft/hydrogen-bce.h" 14 #include "src/crankshaft/hydrogen-bch.h" 15 #include "src/crankshaft/hydrogen-canonicalize.h" 16 #include "src/crankshaft/hydrogen-check-elimination.h" 17 #include "src/crankshaft/hydrogen-dce.h" 18 #include "src/crankshaft/hydrogen-dehoist.h" 19 #include "src/crankshaft/hydrogen-environment-liveness.h" 20 #include "src/crankshaft/hydrogen-escape-analysis.h" 21 #include "src/crankshaft/hydrogen-gvn.h" 22 #include "src/crankshaft/hydrogen-infer-representation.h" 23 #include "src/crankshaft/hydrogen-infer-types.h" 24 #include "src/crankshaft/hydrogen-load-elimination.h" 25 #include "src/crankshaft/hydrogen-mark-deoptimize.h" 26 #include "src/crankshaft/hydrogen-mark-unreachable.h" 27 #include "src/crankshaft/hydrogen-osr.h" 28 #include "src/crankshaft/hydrogen-range-analysis.h" 29 #include "src/crankshaft/hydrogen-redundant-phi.h" 30 #include "src/crankshaft/hydrogen-removable-simulates.h" 31 #include "src/crankshaft/hydrogen-representation-changes.h" 32 #include "src/crankshaft/hydrogen-sce.h" 33 #include "src/crankshaft/hydrogen-store-elimination.h" 34 #include "src/crankshaft/hydrogen-uint32-analysis.h" 35 #include "src/crankshaft/lithium-allocator.h" 36 #include "src/crankshaft/typing.h" 37 #include "src/full-codegen/full-codegen.h" 38 #include "src/ic/call-optimization.h" 39 #include "src/ic/ic.h" 40 // GetRootConstructor 41 #include "src/ic/ic-inl.h" 42 #include "src/isolate-inl.h" 43 #include "src/parsing/parser.h" 44 #include "src/runtime/runtime.h" 45 46 #if V8_TARGET_ARCH_IA32 47 #include "src/crankshaft/ia32/lithium-codegen-ia32.h" 
// NOLINT 48 #elif V8_TARGET_ARCH_X64 49 #include "src/crankshaft/x64/lithium-codegen-x64.h" // NOLINT 50 #elif V8_TARGET_ARCH_ARM64 51 #include "src/crankshaft/arm64/lithium-codegen-arm64.h" // NOLINT 52 #elif V8_TARGET_ARCH_ARM 53 #include "src/crankshaft/arm/lithium-codegen-arm.h" // NOLINT 54 #elif V8_TARGET_ARCH_PPC 55 #include "src/crankshaft/ppc/lithium-codegen-ppc.h" // NOLINT 56 #elif V8_TARGET_ARCH_MIPS 57 #include "src/crankshaft/mips/lithium-codegen-mips.h" // NOLINT 58 #elif V8_TARGET_ARCH_MIPS64 59 #include "src/crankshaft/mips64/lithium-codegen-mips64.h" // NOLINT 60 #elif V8_TARGET_ARCH_X87 61 #include "src/crankshaft/x87/lithium-codegen-x87.h" // NOLINT 62 #else 63 #error Unsupported target architecture. 64 #endif 65 66 namespace v8 { 67 namespace internal { 68 69 HBasicBlock::HBasicBlock(HGraph* graph) 70 : block_id_(graph->GetNextBlockID()), 71 graph_(graph), 72 phis_(4, graph->zone()), 73 first_(NULL), 74 last_(NULL), 75 end_(NULL), 76 loop_information_(NULL), 77 predecessors_(2, graph->zone()), 78 dominator_(NULL), 79 dominated_blocks_(4, graph->zone()), 80 last_environment_(NULL), 81 argument_count_(-1), 82 first_instruction_index_(-1), 83 last_instruction_index_(-1), 84 deleted_phis_(4, graph->zone()), 85 parent_loop_header_(NULL), 86 inlined_entry_block_(NULL), 87 is_inline_return_target_(false), 88 is_reachable_(true), 89 dominates_loop_successors_(false), 90 is_osr_entry_(false), 91 is_ordered_(false) { } 92 93 94 Isolate* HBasicBlock::isolate() const { 95 return graph_->isolate(); 96 } 97 98 99 void HBasicBlock::MarkUnreachable() { 100 is_reachable_ = false; 101 } 102 103 104 void HBasicBlock::AttachLoopInformation() { 105 DCHECK(!IsLoopHeader()); 106 loop_information_ = new(zone()) HLoopInformation(this, zone()); 107 } 108 109 110 void HBasicBlock::DetachLoopInformation() { 111 DCHECK(IsLoopHeader()); 112 loop_information_ = NULL; 113 } 114 115 116 void HBasicBlock::AddPhi(HPhi* phi) { 117 DCHECK(!IsStartBlock()); 118 phis_.Add(phi, 
zone()); 119 phi->SetBlock(this); 120 } 121 122 123 void HBasicBlock::RemovePhi(HPhi* phi) { 124 DCHECK(phi->block() == this); 125 DCHECK(phis_.Contains(phi)); 126 phi->Kill(); 127 phis_.RemoveElement(phi); 128 phi->SetBlock(NULL); 129 } 130 131 132 void HBasicBlock::AddInstruction(HInstruction* instr, SourcePosition position) { 133 DCHECK(!IsStartBlock() || !IsFinished()); 134 DCHECK(!instr->IsLinked()); 135 DCHECK(!IsFinished()); 136 137 if (!position.IsUnknown()) { 138 instr->set_position(position); 139 } 140 if (first_ == NULL) { 141 DCHECK(last_environment() != NULL); 142 DCHECK(!last_environment()->ast_id().IsNone()); 143 HBlockEntry* entry = new(zone()) HBlockEntry(); 144 entry->InitializeAsFirst(this); 145 if (!position.IsUnknown()) { 146 entry->set_position(position); 147 } else { 148 DCHECK(!FLAG_hydrogen_track_positions || 149 !graph()->info()->IsOptimizing() || instr->IsAbnormalExit()); 150 } 151 first_ = last_ = entry; 152 } 153 instr->InsertAfter(last_); 154 } 155 156 157 HPhi* HBasicBlock::AddNewPhi(int merged_index) { 158 if (graph()->IsInsideNoSideEffectsScope()) { 159 merged_index = HPhi::kInvalidMergedIndex; 160 } 161 HPhi* phi = new(zone()) HPhi(merged_index, zone()); 162 AddPhi(phi); 163 return phi; 164 } 165 166 167 HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id, 168 RemovableSimulate removable) { 169 DCHECK(HasEnvironment()); 170 HEnvironment* environment = last_environment(); 171 DCHECK(ast_id.IsNone() || 172 ast_id == BailoutId::StubEntry() || 173 environment->closure()->shared()->VerifyBailoutId(ast_id)); 174 175 int push_count = environment->push_count(); 176 int pop_count = environment->pop_count(); 177 178 HSimulate* instr = 179 new(zone()) HSimulate(ast_id, pop_count, zone(), removable); 180 #ifdef DEBUG 181 instr->set_closure(environment->closure()); 182 #endif 183 // Order of pushed values: newest (top of stack) first. 
This allows 184 // HSimulate::MergeWith() to easily append additional pushed values 185 // that are older (from further down the stack). 186 for (int i = 0; i < push_count; ++i) { 187 instr->AddPushedValue(environment->ExpressionStackAt(i)); 188 } 189 for (GrowableBitVector::Iterator it(environment->assigned_variables(), 190 zone()); 191 !it.Done(); 192 it.Advance()) { 193 int index = it.Current(); 194 instr->AddAssignedValue(index, environment->Lookup(index)); 195 } 196 environment->ClearHistory(); 197 return instr; 198 } 199 200 201 void HBasicBlock::Finish(HControlInstruction* end, SourcePosition position) { 202 DCHECK(!IsFinished()); 203 AddInstruction(end, position); 204 end_ = end; 205 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) { 206 it.Current()->RegisterPredecessor(this); 207 } 208 } 209 210 211 void HBasicBlock::Goto(HBasicBlock* block, SourcePosition position, 212 FunctionState* state, bool add_simulate) { 213 bool drop_extra = state != NULL && 214 state->inlining_kind() == NORMAL_RETURN; 215 216 if (block->IsInlineReturnTarget()) { 217 HEnvironment* env = last_environment(); 218 int argument_count = env->arguments_environment()->parameter_count(); 219 AddInstruction(new(zone()) 220 HLeaveInlined(state->entry(), argument_count), 221 position); 222 UpdateEnvironment(last_environment()->DiscardInlined(drop_extra)); 223 } 224 225 if (add_simulate) AddNewSimulate(BailoutId::None(), position); 226 HGoto* instr = new(zone()) HGoto(block); 227 Finish(instr, position); 228 } 229 230 231 void HBasicBlock::AddLeaveInlined(HValue* return_value, FunctionState* state, 232 SourcePosition position) { 233 HBasicBlock* target = state->function_return(); 234 bool drop_extra = state->inlining_kind() == NORMAL_RETURN; 235 236 DCHECK(target->IsInlineReturnTarget()); 237 DCHECK(return_value != NULL); 238 HEnvironment* env = last_environment(); 239 int argument_count = env->arguments_environment()->parameter_count(); 240 AddInstruction(new(zone()) 
HLeaveInlined(state->entry(), argument_count), 241 position); 242 UpdateEnvironment(last_environment()->DiscardInlined(drop_extra)); 243 last_environment()->Push(return_value); 244 AddNewSimulate(BailoutId::None(), position); 245 HGoto* instr = new(zone()) HGoto(target); 246 Finish(instr, position); 247 } 248 249 250 void HBasicBlock::SetInitialEnvironment(HEnvironment* env) { 251 DCHECK(!HasEnvironment()); 252 DCHECK(first() == NULL); 253 UpdateEnvironment(env); 254 } 255 256 257 void HBasicBlock::UpdateEnvironment(HEnvironment* env) { 258 last_environment_ = env; 259 graph()->update_maximum_environment_size(env->first_expression_index()); 260 } 261 262 263 void HBasicBlock::SetJoinId(BailoutId ast_id) { 264 int length = predecessors_.length(); 265 DCHECK(length > 0); 266 for (int i = 0; i < length; i++) { 267 HBasicBlock* predecessor = predecessors_[i]; 268 DCHECK(predecessor->end()->IsGoto()); 269 HSimulate* simulate = HSimulate::cast(predecessor->end()->previous()); 270 DCHECK(i != 0 || 271 (predecessor->last_environment()->closure().is_null() || 272 predecessor->last_environment()->closure()->shared() 273 ->VerifyBailoutId(ast_id))); 274 simulate->set_ast_id(ast_id); 275 predecessor->last_environment()->set_ast_id(ast_id); 276 } 277 } 278 279 280 bool HBasicBlock::Dominates(HBasicBlock* other) const { 281 HBasicBlock* current = other->dominator(); 282 while (current != NULL) { 283 if (current == this) return true; 284 current = current->dominator(); 285 } 286 return false; 287 } 288 289 290 bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const { 291 if (this == other) return true; 292 return Dominates(other); 293 } 294 295 296 int HBasicBlock::LoopNestingDepth() const { 297 const HBasicBlock* current = this; 298 int result = (current->IsLoopHeader()) ? 
1 : 0; 299 while (current->parent_loop_header() != NULL) { 300 current = current->parent_loop_header(); 301 result++; 302 } 303 return result; 304 } 305 306 307 void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) { 308 DCHECK(IsLoopHeader()); 309 310 SetJoinId(stmt->EntryId()); 311 if (predecessors()->length() == 1) { 312 // This is a degenerated loop. 313 DetachLoopInformation(); 314 return; 315 } 316 317 // Only the first entry into the loop is from outside the loop. All other 318 // entries must be back edges. 319 for (int i = 1; i < predecessors()->length(); ++i) { 320 loop_information()->RegisterBackEdge(predecessors()->at(i)); 321 } 322 } 323 324 325 void HBasicBlock::MarkSuccEdgeUnreachable(int succ) { 326 DCHECK(IsFinished()); 327 HBasicBlock* succ_block = end()->SuccessorAt(succ); 328 329 DCHECK(succ_block->predecessors()->length() == 1); 330 succ_block->MarkUnreachable(); 331 } 332 333 334 void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) { 335 if (HasPredecessor()) { 336 // Only loop header blocks can have a predecessor added after 337 // instructions have been added to the block (they have phis for all 338 // values in the environment, these phis may be eliminated later). 
339 DCHECK(IsLoopHeader() || first_ == NULL); 340 HEnvironment* incoming_env = pred->last_environment(); 341 if (IsLoopHeader()) { 342 DCHECK_EQ(phis()->length(), incoming_env->length()); 343 for (int i = 0; i < phis_.length(); ++i) { 344 phis_[i]->AddInput(incoming_env->values()->at(i)); 345 } 346 } else { 347 last_environment()->AddIncomingEdge(this, pred->last_environment()); 348 } 349 } else if (!HasEnvironment() && !IsFinished()) { 350 DCHECK(!IsLoopHeader()); 351 SetInitialEnvironment(pred->last_environment()->Copy()); 352 } 353 354 predecessors_.Add(pred, zone()); 355 } 356 357 358 void HBasicBlock::AddDominatedBlock(HBasicBlock* block) { 359 DCHECK(!dominated_blocks_.Contains(block)); 360 // Keep the list of dominated blocks sorted such that if there is two 361 // succeeding block in this list, the predecessor is before the successor. 362 int index = 0; 363 while (index < dominated_blocks_.length() && 364 dominated_blocks_[index]->block_id() < block->block_id()) { 365 ++index; 366 } 367 dominated_blocks_.InsertAt(index, block, zone()); 368 } 369 370 371 void HBasicBlock::AssignCommonDominator(HBasicBlock* other) { 372 if (dominator_ == NULL) { 373 dominator_ = other; 374 other->AddDominatedBlock(this); 375 } else if (other->dominator() != NULL) { 376 HBasicBlock* first = dominator_; 377 HBasicBlock* second = other; 378 379 while (first != second) { 380 if (first->block_id() > second->block_id()) { 381 first = first->dominator(); 382 } else { 383 second = second->dominator(); 384 } 385 DCHECK(first != NULL && second != NULL); 386 } 387 388 if (dominator_ != first) { 389 DCHECK(dominator_->dominated_blocks_.Contains(this)); 390 dominator_->dominated_blocks_.RemoveElement(this); 391 dominator_ = first; 392 first->AddDominatedBlock(this); 393 } 394 } 395 } 396 397 398 void HBasicBlock::AssignLoopSuccessorDominators() { 399 // Mark blocks that dominate all subsequent reachable blocks inside their 400 // loop. 
Exploit the fact that blocks are sorted in reverse post order. When 401 // the loop is visited in increasing block id order, if the number of 402 // non-loop-exiting successor edges at the dominator_candidate block doesn't 403 // exceed the number of previously encountered predecessor edges, there is no 404 // path from the loop header to any block with higher id that doesn't go 405 // through the dominator_candidate block. In this case, the 406 // dominator_candidate block is guaranteed to dominate all blocks reachable 407 // from it with higher ids. 408 HBasicBlock* last = loop_information()->GetLastBackEdge(); 409 int outstanding_successors = 1; // one edge from the pre-header 410 // Header always dominates everything. 411 MarkAsLoopSuccessorDominator(); 412 for (int j = block_id(); j <= last->block_id(); ++j) { 413 HBasicBlock* dominator_candidate = graph_->blocks()->at(j); 414 for (HPredecessorIterator it(dominator_candidate); !it.Done(); 415 it.Advance()) { 416 HBasicBlock* predecessor = it.Current(); 417 // Don't count back edges. 418 if (predecessor->block_id() < dominator_candidate->block_id()) { 419 outstanding_successors--; 420 } 421 } 422 423 // If more successors than predecessors have been seen in the loop up to 424 // now, it's not possible to guarantee that the current block dominates 425 // all of the blocks with higher IDs. In this case, assume conservatively 426 // that those paths through loop that don't go through the current block 427 // contain all of the loop's dependencies. Also be careful to record 428 // dominator information about the current loop that's being processed, 429 // and not nested loops, which will be processed when 430 // AssignLoopSuccessorDominators gets called on their header. 
431 DCHECK(outstanding_successors >= 0); 432 HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header(); 433 if (outstanding_successors == 0 && 434 (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) { 435 dominator_candidate->MarkAsLoopSuccessorDominator(); 436 } 437 HControlInstruction* end = dominator_candidate->end(); 438 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) { 439 HBasicBlock* successor = it.Current(); 440 // Only count successors that remain inside the loop and don't loop back 441 // to a loop header. 442 if (successor->block_id() > dominator_candidate->block_id() && 443 successor->block_id() <= last->block_id()) { 444 // Backwards edges must land on loop headers. 445 DCHECK(successor->block_id() > dominator_candidate->block_id() || 446 successor->IsLoopHeader()); 447 outstanding_successors++; 448 } 449 } 450 } 451 } 452 453 454 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const { 455 for (int i = 0; i < predecessors_.length(); ++i) { 456 if (predecessors_[i] == predecessor) return i; 457 } 458 UNREACHABLE(); 459 return -1; 460 } 461 462 463 #ifdef DEBUG 464 void HBasicBlock::Verify() { 465 // Check that every block is finished. 466 DCHECK(IsFinished()); 467 DCHECK(block_id() >= 0); 468 469 // Check that the incoming edges are in edge split form. 
470 if (predecessors_.length() > 1) { 471 for (int i = 0; i < predecessors_.length(); ++i) { 472 DCHECK(predecessors_[i]->end()->SecondSuccessor() == NULL); 473 } 474 } 475 } 476 #endif 477 478 479 void HLoopInformation::RegisterBackEdge(HBasicBlock* block) { 480 this->back_edges_.Add(block, block->zone()); 481 AddBlock(block); 482 } 483 484 485 HBasicBlock* HLoopInformation::GetLastBackEdge() const { 486 int max_id = -1; 487 HBasicBlock* result = NULL; 488 for (int i = 0; i < back_edges_.length(); ++i) { 489 HBasicBlock* cur = back_edges_[i]; 490 if (cur->block_id() > max_id) { 491 max_id = cur->block_id(); 492 result = cur; 493 } 494 } 495 return result; 496 } 497 498 499 void HLoopInformation::AddBlock(HBasicBlock* block) { 500 if (block == loop_header()) return; 501 if (block->parent_loop_header() == loop_header()) return; 502 if (block->parent_loop_header() != NULL) { 503 AddBlock(block->parent_loop_header()); 504 } else { 505 block->set_parent_loop_header(loop_header()); 506 blocks_.Add(block, block->zone()); 507 for (int i = 0; i < block->predecessors()->length(); ++i) { 508 AddBlock(block->predecessors()->at(i)); 509 } 510 } 511 } 512 513 514 #ifdef DEBUG 515 516 // Checks reachability of the blocks in this graph and stores a bit in 517 // the BitVector "reachable()" for every block that can be reached 518 // from the start block of the graph. If "dont_visit" is non-null, the given 519 // block is treated as if it would not be part of the graph. "visited_count()" 520 // returns the number of reachable blocks. 
521 class ReachabilityAnalyzer BASE_EMBEDDED { 522 public: 523 ReachabilityAnalyzer(HBasicBlock* entry_block, 524 int block_count, 525 HBasicBlock* dont_visit) 526 : visited_count_(0), 527 stack_(16, entry_block->zone()), 528 reachable_(block_count, entry_block->zone()), 529 dont_visit_(dont_visit) { 530 PushBlock(entry_block); 531 Analyze(); 532 } 533 534 int visited_count() const { return visited_count_; } 535 const BitVector* reachable() const { return &reachable_; } 536 537 private: 538 void PushBlock(HBasicBlock* block) { 539 if (block != NULL && block != dont_visit_ && 540 !reachable_.Contains(block->block_id())) { 541 reachable_.Add(block->block_id()); 542 stack_.Add(block, block->zone()); 543 visited_count_++; 544 } 545 } 546 547 void Analyze() { 548 while (!stack_.is_empty()) { 549 HControlInstruction* end = stack_.RemoveLast()->end(); 550 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) { 551 PushBlock(it.Current()); 552 } 553 } 554 } 555 556 int visited_count_; 557 ZoneList<HBasicBlock*> stack_; 558 BitVector reachable_; 559 HBasicBlock* dont_visit_; 560 }; 561 562 563 void HGraph::Verify(bool do_full_verify) const { 564 Heap::RelocationLock relocation_lock(isolate()->heap()); 565 AllowHandleDereference allow_deref; 566 AllowDeferredHandleDereference allow_deferred_deref; 567 for (int i = 0; i < blocks_.length(); i++) { 568 HBasicBlock* block = blocks_.at(i); 569 570 block->Verify(); 571 572 // Check that every block contains at least one node and that only the last 573 // node is a control instruction. 574 HInstruction* current = block->first(); 575 DCHECK(current != NULL && current->IsBlockEntry()); 576 while (current != NULL) { 577 DCHECK((current->next() == NULL) == current->IsControlInstruction()); 578 DCHECK(current->block() == block); 579 current->Verify(); 580 current = current->next(); 581 } 582 583 // Check that successors are correctly set. 
584 HBasicBlock* first = block->end()->FirstSuccessor(); 585 HBasicBlock* second = block->end()->SecondSuccessor(); 586 DCHECK(second == NULL || first != NULL); 587 588 // Check that the predecessor array is correct. 589 if (first != NULL) { 590 DCHECK(first->predecessors()->Contains(block)); 591 if (second != NULL) { 592 DCHECK(second->predecessors()->Contains(block)); 593 } 594 } 595 596 // Check that phis have correct arguments. 597 for (int j = 0; j < block->phis()->length(); j++) { 598 HPhi* phi = block->phis()->at(j); 599 phi->Verify(); 600 } 601 602 // Check that all join blocks have predecessors that end with an 603 // unconditional goto and agree on their environment node id. 604 if (block->predecessors()->length() >= 2) { 605 BailoutId id = 606 block->predecessors()->first()->last_environment()->ast_id(); 607 for (int k = 0; k < block->predecessors()->length(); k++) { 608 HBasicBlock* predecessor = block->predecessors()->at(k); 609 DCHECK(predecessor->end()->IsGoto() || 610 predecessor->end()->IsDeoptimize()); 611 DCHECK(predecessor->last_environment()->ast_id() == id); 612 } 613 } 614 } 615 616 // Check special property of first block to have no predecessors. 617 DCHECK(blocks_.at(0)->predecessors()->is_empty()); 618 619 if (do_full_verify) { 620 // Check that the graph is fully connected. 621 ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL); 622 DCHECK(analyzer.visited_count() == blocks_.length()); 623 624 // Check that entry block dominator is NULL. 625 DCHECK(entry_block_->dominator() == NULL); 626 627 // Check dominators. 628 for (int i = 0; i < blocks_.length(); ++i) { 629 HBasicBlock* block = blocks_.at(i); 630 if (block->dominator() == NULL) { 631 // Only start block may have no dominator assigned to. 632 DCHECK(i == 0); 633 } else { 634 // Assert that block is unreachable if dominator must not be visited. 
635 ReachabilityAnalyzer dominator_analyzer(entry_block_, 636 blocks_.length(), 637 block->dominator()); 638 DCHECK(!dominator_analyzer.reachable()->Contains(block->block_id())); 639 } 640 } 641 } 642 } 643 644 #endif 645 646 647 HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer, 648 int32_t value) { 649 if (!pointer->is_set()) { 650 // Can't pass GetInvalidContext() to HConstant::New, because that will 651 // recursively call GetConstant 652 HConstant* constant = HConstant::New(isolate(), zone(), NULL, value); 653 constant->InsertAfter(entry_block()->first()); 654 pointer->set(constant); 655 return constant; 656 } 657 return ReinsertConstantIfNecessary(pointer->get()); 658 } 659 660 661 HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) { 662 if (!constant->IsLinked()) { 663 // The constant was removed from the graph. Reinsert. 664 constant->ClearFlag(HValue::kIsDead); 665 constant->InsertAfter(entry_block()->first()); 666 } 667 return constant; 668 } 669 670 671 HConstant* HGraph::GetConstant0() { 672 return GetConstant(&constant_0_, 0); 673 } 674 675 676 HConstant* HGraph::GetConstant1() { 677 return GetConstant(&constant_1_, 1); 678 } 679 680 681 HConstant* HGraph::GetConstantMinus1() { 682 return GetConstant(&constant_minus1_, -1); 683 } 684 685 686 HConstant* HGraph::GetConstantBool(bool value) { 687 return value ? 
GetConstantTrue() : GetConstantFalse(); 688 } 689 690 691 #define DEFINE_GET_CONSTANT(Name, name, type, htype, boolean_value) \ 692 HConstant* HGraph::GetConstant##Name() { \ 693 if (!constant_##name##_.is_set()) { \ 694 HConstant* constant = new(zone()) HConstant( \ 695 Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \ 696 Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()), \ 697 false, \ 698 Representation::Tagged(), \ 699 htype, \ 700 true, \ 701 boolean_value, \ 702 false, \ 703 ODDBALL_TYPE); \ 704 constant->InsertAfter(entry_block()->first()); \ 705 constant_##name##_.set(constant); \ 706 } \ 707 return ReinsertConstantIfNecessary(constant_##name##_.get()); \ 708 } 709 710 711 DEFINE_GET_CONSTANT(Undefined, undefined, undefined, HType::Undefined(), false) 712 DEFINE_GET_CONSTANT(True, true, boolean, HType::Boolean(), true) 713 DEFINE_GET_CONSTANT(False, false, boolean, HType::Boolean(), false) 714 DEFINE_GET_CONSTANT(Hole, the_hole, the_hole, HType::None(), false) 715 DEFINE_GET_CONSTANT(Null, null, null, HType::Null(), false) 716 717 718 #undef DEFINE_GET_CONSTANT 719 720 #define DEFINE_IS_CONSTANT(Name, name) \ 721 bool HGraph::IsConstant##Name(HConstant* constant) { \ 722 return constant_##name##_.is_set() && constant == constant_##name##_.get(); \ 723 } 724 DEFINE_IS_CONSTANT(Undefined, undefined) 725 DEFINE_IS_CONSTANT(0, 0) 726 DEFINE_IS_CONSTANT(1, 1) 727 DEFINE_IS_CONSTANT(Minus1, minus1) 728 DEFINE_IS_CONSTANT(True, true) 729 DEFINE_IS_CONSTANT(False, false) 730 DEFINE_IS_CONSTANT(Hole, the_hole) 731 DEFINE_IS_CONSTANT(Null, null) 732 733 #undef DEFINE_IS_CONSTANT 734 735 736 HConstant* HGraph::GetInvalidContext() { 737 return GetConstant(&constant_invalid_context_, 0xFFFFC0C7); 738 } 739 740 741 bool HGraph::IsStandardConstant(HConstant* constant) { 742 if (IsConstantUndefined(constant)) return true; 743 if (IsConstant0(constant)) return true; 744 if (IsConstant1(constant)) return true; 745 if 
(IsConstantMinus1(constant)) return true; 746 if (IsConstantTrue(constant)) return true; 747 if (IsConstantFalse(constant)) return true; 748 if (IsConstantHole(constant)) return true; 749 if (IsConstantNull(constant)) return true; 750 return false; 751 } 752 753 754 HGraphBuilder::IfBuilder::IfBuilder() : builder_(NULL), needs_compare_(true) {} 755 756 757 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder) 758 : needs_compare_(true) { 759 Initialize(builder); 760 } 761 762 763 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder, 764 HIfContinuation* continuation) 765 : needs_compare_(false), first_true_block_(NULL), first_false_block_(NULL) { 766 InitializeDontCreateBlocks(builder); 767 continuation->Continue(&first_true_block_, &first_false_block_); 768 } 769 770 771 void HGraphBuilder::IfBuilder::InitializeDontCreateBlocks( 772 HGraphBuilder* builder) { 773 builder_ = builder; 774 finished_ = false; 775 did_then_ = false; 776 did_else_ = false; 777 did_else_if_ = false; 778 did_and_ = false; 779 did_or_ = false; 780 captured_ = false; 781 pending_merge_block_ = false; 782 split_edge_merge_block_ = NULL; 783 merge_at_join_blocks_ = NULL; 784 normal_merge_at_join_block_count_ = 0; 785 deopt_merge_at_join_block_count_ = 0; 786 } 787 788 789 void HGraphBuilder::IfBuilder::Initialize(HGraphBuilder* builder) { 790 InitializeDontCreateBlocks(builder); 791 HEnvironment* env = builder->environment(); 792 first_true_block_ = builder->CreateBasicBlock(env->Copy()); 793 first_false_block_ = builder->CreateBasicBlock(env->Copy()); 794 } 795 796 797 HControlInstruction* HGraphBuilder::IfBuilder::AddCompare( 798 HControlInstruction* compare) { 799 DCHECK(did_then_ == did_else_); 800 if (did_else_) { 801 // Handle if-then-elseif 802 did_else_if_ = true; 803 did_else_ = false; 804 did_then_ = false; 805 did_and_ = false; 806 did_or_ = false; 807 pending_merge_block_ = false; 808 split_edge_merge_block_ = NULL; 809 HEnvironment* env = builder()->environment(); 810 
first_true_block_ = builder()->CreateBasicBlock(env->Copy()); 811 first_false_block_ = builder()->CreateBasicBlock(env->Copy()); 812 } 813 if (split_edge_merge_block_ != NULL) { 814 HEnvironment* env = first_false_block_->last_environment(); 815 HBasicBlock* split_edge = builder()->CreateBasicBlock(env->Copy()); 816 if (did_or_) { 817 compare->SetSuccessorAt(0, split_edge); 818 compare->SetSuccessorAt(1, first_false_block_); 819 } else { 820 compare->SetSuccessorAt(0, first_true_block_); 821 compare->SetSuccessorAt(1, split_edge); 822 } 823 builder()->GotoNoSimulate(split_edge, split_edge_merge_block_); 824 } else { 825 compare->SetSuccessorAt(0, first_true_block_); 826 compare->SetSuccessorAt(1, first_false_block_); 827 } 828 builder()->FinishCurrentBlock(compare); 829 needs_compare_ = false; 830 return compare; 831 } 832 833 834 void HGraphBuilder::IfBuilder::Or() { 835 DCHECK(!needs_compare_); 836 DCHECK(!did_and_); 837 did_or_ = true; 838 HEnvironment* env = first_false_block_->last_environment(); 839 if (split_edge_merge_block_ == NULL) { 840 split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy()); 841 builder()->GotoNoSimulate(first_true_block_, split_edge_merge_block_); 842 first_true_block_ = split_edge_merge_block_; 843 } 844 builder()->set_current_block(first_false_block_); 845 first_false_block_ = builder()->CreateBasicBlock(env->Copy()); 846 } 847 848 849 void HGraphBuilder::IfBuilder::And() { 850 DCHECK(!needs_compare_); 851 DCHECK(!did_or_); 852 did_and_ = true; 853 HEnvironment* env = first_false_block_->last_environment(); 854 if (split_edge_merge_block_ == NULL) { 855 split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy()); 856 builder()->GotoNoSimulate(first_false_block_, split_edge_merge_block_); 857 first_false_block_ = split_edge_merge_block_; 858 } 859 builder()->set_current_block(first_true_block_); 860 first_true_block_ = builder()->CreateBasicBlock(env->Copy()); 861 } 862 863 864 void 
HGraphBuilder::IfBuilder::CaptureContinuation(
    HIfContinuation* continuation) {
  // Hand this if's true/false successor blocks over to |continuation|
  // instead of merging them; the caller resumes building from there.
  DCHECK(!did_else_if_);
  DCHECK(!finished_);
  DCHECK(!captured_);

  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  DCHECK(true_block != NULL);
  DCHECK(false_block != NULL);
  continuation->Capture(true_block, false_block);
  captured_ = true;
  // No block remains current once the continuation owns both successors.
  builder()->set_current_block(NULL);
  End();
}


// Wire this if's true/false exits into an already-captured |continuation|,
// so several independent ifs can funnel into the same pair of join blocks.
void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
  DCHECK(!did_else_if_);
  DCHECK(!finished_);
  DCHECK(!captured_);
  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  merge_at_join_blocks_ = NULL;
  if (true_block != NULL && !true_block->IsFinished()) {
    DCHECK(continuation->IsTrueReachable());
    builder()->GotoNoSimulate(true_block, continuation->true_branch());
  }
  if (false_block != NULL && !false_block->IsFinished()) {
    DCHECK(continuation->IsFalseReachable());
    builder()->GotoNoSimulate(false_block, continuation->false_branch());
  }
  captured_ = true;
  End();
}


// Begin the "then" clause: subsequent instructions are emitted into
// first_true_block_.
void HGraphBuilder::IfBuilder::Then() {
  DCHECK(!captured_);
  DCHECK(!finished_);
  did_then_ = true;
  if (needs_compare_) {
    // Handle if's without any expressions, they jump directly to the "else"
    // branch. However, we must pretend that the "then" branch is reachable,
    // so that the graph builder visits it and sees any live range extending
    // constructs within it.
    HConstant* constant_false = builder()->graph()->GetConstantFalse();
    ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
    boolean_type.Add(ToBooleanStub::BOOLEAN);
    HBranch* branch = builder()->New<HBranch>(
        constant_false, boolean_type, first_true_block_, first_false_block_);
    builder()->FinishCurrentBlock(branch);
  }
  builder()->set_current_block(first_true_block_);
  pending_merge_block_ = true;
}


// Begin the "else" clause: records the end of the "then" clause and makes
// first_false_block_ the current block.
void HGraphBuilder::IfBuilder::Else() {
  DCHECK(did_then_);
  DCHECK(!captured_);
  DCHECK(!finished_);
  AddMergeAtJoinBlock(false);
  builder()->set_current_block(first_false_block_);
  pending_merge_block_ = true;
  did_else_ = true;
}


// Terminate the current clause with an eager deoptimization.
void HGraphBuilder::IfBuilder::Deopt(Deoptimizer::DeoptReason reason) {
  DCHECK(did_then_);
  builder()->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  AddMergeAtJoinBlock(true);
}


// Terminate the current clause by returning |value| from the function.
void HGraphBuilder::IfBuilder::Return(HValue* value) {
  // -1 as the parameter count: presumably tells HReturn to use the
  // dynamic argument count — TODO(review) confirm against HReturn.
  HValue* parameter_count = builder()->graph()->GetConstantMinus1();
  builder()->FinishExitCurrentBlock(
      builder()->New<HReturn>(value, parameter_count));
  AddMergeAtJoinBlock(false);
}


// Record the end of the currently open clause so End() can merge it later.
// A MergeAtJoinBlock record is linked even when the clause already ended in
// a return/deopt (block == NULL), so Finish() always sees one record per
// clause.
void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
  if (!pending_merge_block_) return;
  HBasicBlock* block = builder()->current_block();
  DCHECK(block == NULL || !block->IsFinished());
  MergeAtJoinBlock* record = new (builder()->zone())
      MergeAtJoinBlock(block, deopt, merge_at_join_blocks_);
  merge_at_join_blocks_ = record;
  if (block != NULL) {
    DCHECK(block->end() == NULL);
    // NOTE(review): these two increments look swapped (the deopt path bumps
    // the "normal" counter and vice versa). Only their sum is read in End()
    // within this file, so behavior there is unaffected — confirm before
    // relying on the individual counts.
    if (deopt) {
      normal_merge_at_join_block_count_++;
    } else {
      deopt_merge_at_join_block_count_++;
    }
  }
  builder()->set_current_block(NULL);
  pending_merge_block_ = false;
}


// Close both clauses (synthesizing empty Then()/Else() clauses if needed)
// without yet merging control flow.
void HGraphBuilder::IfBuilder::Finish() {
  DCHECK(!finished_);
  if (!did_then_) {
    Then();
  }
  AddMergeAtJoinBlock(false);
  if (!did_else_) {
    Else();
    AddMergeAtJoinBlock(false);
  }
  finished_ = true;
}


// Close both clauses and return their continuation blocks. Relies on the
// merge list holding exactly two records: else (list head), then then.
void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
                                      HBasicBlock** else_continuation) {
  Finish();

  MergeAtJoinBlock* else_record = merge_at_join_blocks_;
  if (else_continuation != NULL) {
    *else_continuation = else_record->block_;
  }
  MergeAtJoinBlock* then_record = else_record->next_;
  if (then_continuation != NULL) {
    *then_continuation = then_record->block_;
  }
  DCHECK(then_record->next_ == NULL);
}


// End the if when neither branch falls through; leaves no current block.
void HGraphBuilder::IfBuilder::EndUnreachable() {
  if (captured_) return;
  Finish();
  builder()->set_current_block(nullptr);
}


// End the if and merge the surviving (non-deopting) clause ends into a
// single current block.
void HGraphBuilder::IfBuilder::End() {
  if (captured_) return;
  Finish();

  int total_merged_blocks = normal_merge_at_join_block_count_ +
                            deopt_merge_at_join_block_count_;
  DCHECK(total_merged_blocks >= 1);
  HBasicBlock* merge_block =
      total_merged_blocks == 1 ? NULL : builder()->graph()->CreateBasicBlock();

  // Merge non-deopt blocks first to ensure environment has right size for
  // padding.
  MergeAtJoinBlock* current = merge_at_join_blocks_;
  while (current != NULL) {
    if (!current->deopt_ && current->block_ != NULL) {
      // If there is only one block that makes it through to the end of the
      // if, then just set it as the current block and continue rather then
      // creating an unnecessary merge block.
      if (total_merged_blocks == 1) {
        builder()->set_current_block(current->block_);
        return;
      }
      builder()->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }

  // Merge deopt blocks, padding when necessary.
  current = merge_at_join_blocks_;
  while (current != NULL) {
    if (current->deopt_ && current->block_ != NULL) {
      // Deopt clauses never reach the merge block; terminate them with an
      // abnormal exit.
      current->block_->FinishExit(
          HAbnormalExit::New(builder()->isolate(), builder()->zone(), NULL),
          SourcePosition::Unknown());
    }
    current = current->next_;
  }
  builder()->set_current_block(merge_block);
}


// Unconditional loop; terminated only via Break().
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder) {
  Initialize(builder, NULL, kWhileTrue, NULL);
}


// Counting loop with the default increment of 1.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
                                        LoopBuilder::Direction direction) {
  Initialize(builder, context, direction, builder->graph()->GetConstant1());
}


// Counting loop with an explicit increment amount.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
                                        LoopBuilder::Direction direction,
                                        HValue* increment_amount) {
  Initialize(builder, context, direction, increment_amount);
  increment_amount_ = increment_amount;
}


// Shared constructor body: remembers the configuration and creates the
// loop header block up front.
void HGraphBuilder::LoopBuilder::Initialize(HGraphBuilder* builder,
                                            HValue* context,
                                            Direction direction,
                                            HValue* increment_amount) {
  builder_ = builder;
  context_ = context;
  direction_ = direction;
  increment_amount_ = increment_amount;

  finished_ = false;
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
}


// Start a counting loop: adds the loop phi for the induction variable,
// emits the (phi <token> terminating) compare-and-branch in the header,
// and leaves the body block as the current block. Returns the value the
// body should use as the index (pre-incremented/decremented if requested).
HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  DCHECK(direction_ != kWhileTrue);
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  env->Push(initial);
  builder_->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  // Remove the phi from the expression stack
  body_env->Pop();
  exit_env->Pop();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);

  builder_->set_current_block(header_block_);
  env->Pop();
  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
      phi_, terminating, token, body_block_, exit_block_));

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    Isolate* isolate = builder_->isolate();
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(isolate, zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(isolate, zone(), context_, phi_, one);
    }
    // The induction variable is assumed not to overflow here.
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}


// Start the body of a while(true) loop, dropping |drop_count| values the
// caller pushed onto the environment before entering the loop.
void HGraphBuilder::LoopBuilder::BeginBody(int drop_count) {
  DCHECK(direction_ == kWhileTrue);
  HEnvironment* env = builder_->environment();
  builder_->GotoNoSimulate(header_block_);
  builder_->set_current_block(header_block_);
  env->Drop(drop_count);
}


// Jump out of the loop; lazily creates the exit trampoline block the first
// time a break is seen.
void HGraphBuilder::LoopBuilder::Break() {
  if (exit_trampoline_block_ == NULL) {
    // Its the first time we saw a break.
    if (direction_ == kWhileTrue) {
      HEnvironment* env = builder_->environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
    } else {
      // Counting loops already have an exit block; route it through the
      // trampoline as well so all exits merge in one place.
      HEnvironment* env = exit_block_->last_environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
      builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
    }
  }

  builder_->GotoNoSimulate(exit_trampoline_block_);
  builder_->set_current_block(NULL);
}


// Close the loop body: emits the post-increment/decrement if configured,
// wires up the back edge, and makes the loop exit the current block.
void HGraphBuilder::LoopBuilder::EndBody() {
  DCHECK(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    Isolate* isolate = builder_->isolate();
    if (direction_ == kPostIncrement) {
      increment_ =
          HAdd::New(isolate, zone(), context_, phi_, increment_amount_);
    } else {
      increment_ =
          HSub::New(isolate, zone(), context_, phi_, increment_amount_);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  if (direction_ != kWhileTrue) {
    // Push the new increment value on the expression stack to merge into
    // the phi.
    builder_->environment()->Push(increment_);
  }
  HBasicBlock* last_block = builder_->current_block();
  builder_->GotoNoSimulate(last_block, header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  if (exit_trampoline_block_ != NULL) {
    builder_->set_current_block(exit_trampoline_block_);
  } else {
    builder_->set_current_block(exit_block_);
  }
  finished_ = true;
}


// Build the Hydrogen graph for the current compilation. Returns NULL when
// BuildGraph() bails out.
HGraph* HGraphBuilder::CreateGraph() {
  graph_ = new(zone()) HGraph(info_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueness();
  return graph_;
}


// Append |instr| to the current block at the current source position.
HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  DCHECK(current_block() != NULL);
  DCHECK(!FLAG_hydrogen_track_positions ||
         !position_.IsUnknown() ||
         !info_->IsOptimizing());
  current_block()->AddInstruction(instr, source_position());
  if (graph()->IsInsideNoSideEffectsScope()) {
    instr->SetFlag(HValue::kHasNoObservableSideEffects);
  }
  return instr;
}


// Terminate the current block with the control instruction |last|.
void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
  DCHECK(!FLAG_hydrogen_track_positions ||
         !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->Finish(last, source_position());
  if (last->IsReturn() || last->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}


// Terminate the current block with |instruction| as a function exit.
void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
  DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->FinishExit(instruction, source_position());
  if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}


// Emit a load/add/store sequence that bumps a native-code StatsCounter,
// guarded by the --native-code-counters flag.
void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    HValue* reference = Add<HConstant>(ExternalReference(counter));
    HValue* old_value =
        Add<HLoadNamedField>(reference, nullptr, HObjectAccess::ForCounter());
    HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
                          new_value, STORE_TO_INITIALIZED_ENTRY);
  }
}


// Add a simulate (deopt state snapshot) for bailout point |id|.
void HGraphBuilder::AddSimulate(BailoutId id,
                                RemovableSimulate removable) {
  DCHECK(current_block() != NULL);
  DCHECK(!graph()->IsInsideNoSideEffectsScope());
  current_block()->AddNewSimulate(id, source_position(), removable);
}


// Create a basic block seeded with the given initial environment.
HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
  HBasicBlock* b = graph()->CreateBasicBlock();
  b->SetInitialEnvironment(env);
  return b;
}


// Create a loop header block with a loop-header copy of the current
// environment and attached loop information.
HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
  HBasicBlock* header = graph()->CreateBasicBlock();
  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
  header->SetInitialEnvironment(entry_env);
  header->AttachLoopInformation();
  return header;
}


// Load object->map()->bit_field2() and decode the elements-kind bits.
HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
  HValue* map = Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMap());

  HValue* bit_field2 =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
  return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
}


// Emit a heap-object (non-smi) check unless the type already guarantees it.
HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
  if (obj->type().IsHeapObject()) return obj;
  return Add<HCheckHeapObject>(obj);
}


// Deoptimize unconditionally and terminate the block abnormally.
void HGraphBuilder::FinishExitWithHardDeoptimization(
    Deoptimizer::DeoptReason reason) {
  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  FinishExitCurrentBlock(New<HAbnormalExit>());
}


// Emit checks that |string| is a string, unless statically known already.
HValue* HGraphBuilder::BuildCheckString(HValue* string) {
  if (!string->type().IsString()) {
    DCHECK(!string->IsConstant() ||
           !HConstant::cast(string)->HasStringValue());
    BuildCheckHeapObject(string);
    return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
  }
  return string;
}


// Wrap the receiver for a sloppy-mode call; strict/native callees (and
// values already known to be JSObjects) pass through unchanged.
HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
  if (object->type().IsJSObject()) return object;
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    Handle<JSFunction> f = Handle<JSFunction>::cast(
        HConstant::cast(function)->handle(isolate()));
    SharedFunctionInfo* shared = f->shared();
    if (is_strict(shared->language_mode()) || shared->native()) return object;
  }
  return Add<HWrapReceiver>(object, function);
}


// Bounds-check |key| against capacity + kMaxGap and grow the backing
// store to hold it; returns the (possibly new) elements array.
HValue* HGraphBuilder::BuildCheckAndGrowElementsCapacity(
    HValue* object, HValue* elements, ElementsKind kind, HValue* length,
    HValue* capacity, HValue* key) {
  HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
  HValue* max_capacity = AddUncasted<HAdd>(capacity, max_gap);
  Add<HBoundsCheck>(key, max_capacity);

  HValue* new_capacity = BuildNewElementsCapacity(key);
  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind, kind,
                                                   length, new_capacity);
  return new_elements;
}


// Handle a keyed store that may be out of bounds: grows capacity and/or
// bumps the array length when |key| is at or past the current length.
// Leaves the elements array to use on the environment stack and returns it.
HValue* HGraphBuilder::BuildCheckForCapacityGrow(
    HValue* object,
    HValue* elements,
    ElementsKind kind,
    HValue* length,
    HValue* key,
    bool is_js_array,
    PropertyAccessType access_type) {
  IfBuilder length_checker(this);

  // Holey kinds may be written anywhere past the length; packed kinds only
  // at exactly key == length.
  Token::Value token = IsHoleyElementsKind(kind) ?
      Token::GTE : Token::EQ;
  length_checker.If<HCompareNumericAndBranch>(key, length, token);

  length_checker.Then();

  HValue* current_capacity = AddLoadFixedArrayLength(elements);

  if (top_info()->IsStub()) {
    IfBuilder capacity_checker(this);
    capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
                                                  Token::GTE);
    capacity_checker.Then();
    HValue* new_elements = BuildCheckAndGrowElementsCapacity(
        object, elements, kind, length, current_capacity, key);
    environment()->Push(new_elements);
    capacity_checker.Else();
    environment()->Push(elements);
    capacity_checker.End();
  } else {
    // In optimized code, defer the grow decision to HMaybeGrowElements.
    HValue* result = Add<HMaybeGrowElements>(
        object, elements, key, current_capacity, is_js_array, kind);
    environment()->Push(result);
  }

  if (is_js_array) {
    HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
    new_length->ClearFlag(HValue::kCanOverflow);

    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
                          new_length);
  }

  if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
    HValue* checked_elements = environment()->Top();

    // Write zero to ensure that the new element is initialized with some smi.
    Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), nullptr,
                     kind);
  }

  length_checker.Else();
  Add<HBoundsCheck>(key, length);

  environment()->Push(elements);
  length_checker.End();

  return environment()->Pop();
}


// Copy-on-write check: if |elements| is the COW fixed array, copy it (via
// a grow to the same capacity) before it gets mutated.
HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
                                                HValue* elements,
                                                ElementsKind kind,
                                                HValue* length) {
  Factory* factory = isolate()->factory();

  IfBuilder cow_checker(this);

  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  cow_checker.Then();

  HValue* capacity = AddLoadFixedArrayLength(elements);

  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
                                                   kind, length, capacity);

  environment()->Push(new_elements);

  cow_checker.Else();

  environment()->Push(elements);

  cow_checker.End();

  return environment()->Pop();
}


// Transition |object| from |from_kind| to |to_kind|: traps allocation
// mementos when tracked, converts the backing store for non-simple
// transitions, then installs the new |map|.
void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
                                                HValue* map,
                                                ElementsKind from_kind,
                                                ElementsKind to_kind,
                                                bool is_jsarray) {
  DCHECK(!IsFastHoleyElementsKind(from_kind) ||
         IsFastHoleyElementsKind(to_kind));

  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    Add<HTrapAllocationMemento>(object);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    HInstruction* elements = AddLoadElements(object);

    HInstruction* empty_fixed_array = Add<HConstant>(
        isolate()->factory()->empty_fixed_array());

    IfBuilder if_builder(this);

    // The empty fixed array needs no conversion.
    if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);

    if_builder.Then();

    HInstruction* elements_length = AddLoadFixedArrayLength(elements);

    HInstruction* array_length =
        is_jsarray
            ? Add<HLoadNamedField>(object, nullptr,
                                   HObjectAccess::ForArrayLength(from_kind))
            : elements_length;

    BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
                              array_length, elements_length);

    if_builder.End();
  }

  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
}


// Check that |receiver| is a JSObject (instance type in the range
// [JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE]) and that none of the bits in
// |bit_field_mask| are set, using one combined 16-bit load from the map.
void HGraphBuilder::BuildJSObjectCheck(HValue* receiver,
                                       int bit_field_mask) {
  // Check that the object isn't a smi.
  Add<HCheckHeapObject>(receiver);

  // Get the map of the receiver.
  HValue* map =
      Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());

  // Check the instance type and if an access check is needed, this can be
  // done with a single load, since both bytes are adjacent in the map.
  HObjectAccess access(HObjectAccess::ForMapInstanceTypeAndBitField());
  HValue* instance_type_and_bit_field =
      Add<HLoadNamedField>(map, nullptr, access);

  HValue* mask = Add<HConstant>(0x00FF | (bit_field_mask << 8));
  HValue* and_result = AddUncasted<HBitwise>(Token::BIT_AND,
                                             instance_type_and_bit_field,
                                             mask);
  // (masked value - JS_OBJECT_TYPE) must fall inside
  // [0, LAST_JS_OBJECT_TYPE - JS_OBJECT_TYPE]; a set bit-field bit pushes
  // the value out of range, so the bounds check below deopts for it too.
  HValue* sub_result = AddUncasted<HSub>(and_result,
                                         Add<HConstant>(JS_OBJECT_TYPE));
  Add<HBoundsCheck>(sub_result,
                    Add<HConstant>(LAST_JS_OBJECT_TYPE + 1 - JS_OBJECT_TYPE));
}


// Classify |key| for keyed access and route control flow into
// |join_continuation|: if-true receives a smi element index, if-false a
// unique name; everything else deopts to the runtime.
void HGraphBuilder::BuildKeyedIndexCheck(HValue* key,
                                         HIfContinuation* join_continuation) {
  // The sometimes unintuitively backward ordering of the ifs below is
  // convoluted, but necessary. All of the paths must guarantee that the
  // if-true of the continuation returns a smi element index and the if-false of
  // the continuation returns either a symbol or a unique string key. All other
  // object types cause a deopt to fall back to the runtime.

  IfBuilder key_smi_if(this);
  key_smi_if.If<HIsSmiAndBranch>(key);
  key_smi_if.Then();
  {
    Push(key);  // Nothing to do, just continue to true of continuation.
  }
  key_smi_if.Else();
  {
    HValue* map = Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForMap());
    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());

    // Non-unique string, check for a string with a hash code that is actually
    // an index.
    STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
    IfBuilder not_string_or_name_if(this);
    not_string_or_name_if.If<HCompareNumericAndBranch>(
        instance_type,
        Add<HConstant>(LAST_UNIQUE_NAME_TYPE),
        Token::GT);

    not_string_or_name_if.Then();
    {
      // Non-smi, non-Name, non-String: Try to convert to smi in case of
      // HeapNumber.
      // TODO(danno): This could call some variant of ToString
      Push(AddUncasted<HForceRepresentation>(key, Representation::Smi()));
    }
    not_string_or_name_if.Else();
    {
      // String or Name: check explicitly for Name, they can short-circuit
      // directly to unique non-index key path.
      IfBuilder not_symbol_if(this);
      not_symbol_if.If<HCompareNumericAndBranch>(
          instance_type,
          Add<HConstant>(SYMBOL_TYPE),
          Token::NE);

      not_symbol_if.Then();
      {
        // String: check whether the String is a String of an index. If it is,
        // extract the index value from the hash.
        HValue* hash = Add<HLoadNamedField>(key, nullptr,
                                            HObjectAccess::ForNameHashField());
        HValue* not_index_mask = Add<HConstant>(static_cast<int>(
            String::kContainsCachedArrayIndexMask));

        HValue* not_index_test = AddUncasted<HBitwise>(
            Token::BIT_AND, hash, not_index_mask);

        IfBuilder string_index_if(this);
        string_index_if.If<HCompareNumericAndBranch>(not_index_test,
                                                     graph()->GetConstant0(),
                                                     Token::EQ);
        string_index_if.Then();
        {
          // String with index in hash: extract string and merge to index path.
          Push(BuildDecodeField<String::ArrayIndexValueBits>(hash));
        }
        string_index_if.Else();
        {
          // Key is a non-index String, check for uniqueness/internalization.
          // If it's not internalized yet, internalize it now.
          HValue* not_internalized_bit = AddUncasted<HBitwise>(
              Token::BIT_AND,
              instance_type,
              Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));

          IfBuilder internalized(this);
          internalized.If<HCompareNumericAndBranch>(not_internalized_bit,
                                                    graph()->GetConstant0(),
                                                    Token::EQ);
          internalized.Then();
          Push(key);

          internalized.Else();
          Add<HPushArguments>(key);
          HValue* intern_key = Add<HCallRuntime>(
              Runtime::FunctionForId(Runtime::kInternalizeString), 1);
          Push(intern_key);

          internalized.End();
          // Key guaranteed to be a unique string
        }
        string_index_if.JoinContinuation(join_continuation);
      }
      not_symbol_if.Else();
      {
        Push(key);  // Key is symbol
      }
      not_symbol_if.JoinContinuation(join_continuation);
    }
    not_string_or_name_if.JoinContinuation(join_continuation);
  }
  key_smi_if.JoinContinuation(join_continuation);
}


// Deoptimize when |receiver| is the global object.
void HGraphBuilder::BuildNonGlobalObjectCheck(HValue* receiver) {
  // Get the instance type of the receiver, and make sure that it is
  // not one of the global object types.
  HValue* map =
      Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
  HValue* instance_type =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
  HValue* global_type = Add<HConstant>(JS_GLOBAL_OBJECT_TYPE);

  IfBuilder if_global_object(this);
  if_global_object.If<HCompareNumericAndBranch>(instance_type, global_type,
                                                Token::EQ);
  if_global_object.ThenDeopt(Deoptimizer::kReceiverWasAGlobalObject);
  if_global_object.End();
}


// Capture a continuation whose if-true branch means |object| has
// dictionary-mode (hash table) properties.
void HGraphBuilder::BuildTestForDictionaryProperties(
    HValue* object,
    HIfContinuation* continuation) {
  HValue* properties = Add<HLoadNamedField>(
      object, nullptr, HObjectAccess::ForPropertiesPointer());
  HValue* properties_map =
      Add<HLoadNamedField>(properties, nullptr, HObjectAccess::ForMap());
  HValue* hash_map = Add<HLoadRoot>(Heap::kHashTableMapRootIndex);
  IfBuilder builder(this);
  builder.If<HCompareObjectEqAndBranch>(properties_map, hash_map);
  builder.CaptureContinuation(continuation);
}


// Compute the keyed-lookup-cache index for (map of |object|, |key|).
HValue* HGraphBuilder::BuildKeyedLookupCacheHash(HValue* object,
                                                 HValue* key) {
  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  HValue* object_map =
      Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMapAsInteger32());
  HValue* shifted_map = AddUncasted<HShr>(
      object_map, Add<HConstant>(KeyedLookupCache::kMapHashShift));
  HValue* string_hash =
      Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForStringHashField());
  HValue* shifted_hash = AddUncasted<HShr>(
      string_hash, Add<HConstant>(String::kHashShift));
  HValue* xor_result = AddUncasted<HBitwise>(Token::BIT_XOR, shifted_map,
                                             shifted_hash);
  int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
  return AddUncasted<HBitwise>(Token::BIT_AND, xor_result,
                               Add<HConstant>(mask));
}


// Scramble an element index into a hash via xor-with-seed followed by a
// shift/add/xor mixing sequence (see the per-step comments below).
// Presumably this mirrors the runtime's seeded integer hash so that graph
// code and C++ probe the same dictionary slots — TODO(review) confirm.
HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
  // NOTE(review): the heap's hash seed is funneled through uint32_t into an
  // int32_t here; confirm the truncation is intended.
  int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
  HValue* seed = Add<HConstant>(seed_value);
  HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);

  // hash = ~hash + (hash << 15);
  HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
  HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
                                           graph()->GetConstantMinus1());
  hash = AddUncasted<HAdd>(shifted_hash, not_hash);

  // hash = hash ^ (hash >> 12);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash + (hash << 2);
  shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
  hash = AddUncasted<HAdd>(hash, shifted_hash);

  // hash = hash ^ (hash >> 4);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash * 2057;
  hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
  hash->ClearFlag(HValue::kCanOverflow);

  // hash = hash ^ (hash >> 16);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
  return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
}


// Probe a number-dictionary backing store for |key| (with |hash| already
// computed): loops over probe positions, yielding the stored value when
// the key matches with plain property details, and calling the
// %KeyedGetProperty runtime function otherwise.
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(
    HValue* receiver, HValue* elements, HValue* key, HValue* hash,
    LanguageMode language_mode) {
  HValue* capacity =
      Add<HLoadKeyed>(elements, Add<HConstant>(NameDictionary::kCapacityIndex),
                      nullptr, nullptr, FAST_ELEMENTS);

  // Capacity is a power of two, so capacity - 1 masks a hash into range.
  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
  mask->ChangeRepresentation(Representation::Integer32());
  mask->ClearFlag(HValue::kCanOverflow);

  HValue* entry = hash;
  HValue* count = graph()->GetConstant1();
  Push(entry);
  Push(count);

  HIfContinuation return_or_loop_continuation(graph()->CreateBasicBlock(),
                                              graph()->CreateBasicBlock());
  HIfContinuation found_key_match_continuation(graph()->CreateBasicBlock(),
                                               graph()->CreateBasicBlock());
  LoopBuilder probe_loop(this);
  probe_loop.BeginBody(2);  // Drop entry, count from last environment to
                            // appease live range building without simulates.

  count = Pop();
  entry = Pop();
  entry = AddUncasted<HBitwise>(Token::BIT_AND, entry, mask);
  int entry_size = SeededNumberDictionary::kEntrySize;
  HValue* base_index = AddUncasted<HMul>(entry, Add<HConstant>(entry_size));
  base_index->ClearFlag(HValue::kCanOverflow);
  int start_offset = SeededNumberDictionary::kElementsStartIndex;
  HValue* key_index =
      AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset));
  key_index->ClearFlag(HValue::kCanOverflow);

  HValue* candidate_key =
      Add<HLoadKeyed>(elements, key_index, nullptr, nullptr, FAST_ELEMENTS);
  IfBuilder if_undefined(this);
  if_undefined.If<HCompareObjectEqAndBranch>(candidate_key,
                                             graph()->GetConstantUndefined());
  if_undefined.Then();
  {
    // element == undefined means "not found". Call the runtime.
    // TODO(jkummerow): walk the prototype chain instead.
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(
        Runtime::FunctionForId(is_strong(language_mode)
                                   ? Runtime::kKeyedGetPropertyStrong
                                   : Runtime::kKeyedGetProperty),
        2));
  }
  if_undefined.Else();
  {
    IfBuilder if_match(this);
    if_match.If<HCompareObjectEqAndBranch>(candidate_key, key);
    if_match.Then();
    if_match.Else();

    // Update non-internalized string in the dictionary with internalized key?
    IfBuilder if_update_with_internalized(this);
    HValue* smi_check =
        if_update_with_internalized.IfNot<HIsSmiAndBranch>(candidate_key);
    if_update_with_internalized.And();
    HValue* map = AddLoadMap(candidate_key, smi_check);
    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
    HValue* not_internalized_bit = AddUncasted<HBitwise>(
        Token::BIT_AND, instance_type,
        Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
    if_update_with_internalized.If<HCompareNumericAndBranch>(
        not_internalized_bit, graph()->GetConstant0(), Token::NE);
    if_update_with_internalized.And();
    if_update_with_internalized.IfNot<HCompareObjectEqAndBranch>(
        candidate_key, graph()->GetConstantHole());
    if_update_with_internalized.AndIf<HStringCompareAndBranch>(candidate_key,
                                                               key, Token::EQ);
    if_update_with_internalized.Then();
    // Replace a key that is a non-internalized string by the equivalent
    // internalized string for faster further lookups.
    Add<HStoreKeyed>(elements, key_index, key, nullptr, FAST_ELEMENTS);
    if_update_with_internalized.Else();

    if_update_with_internalized.JoinContinuation(&found_key_match_continuation);
    if_match.JoinContinuation(&found_key_match_continuation);

    IfBuilder found_key_match(this, &found_key_match_continuation);
    found_key_match.Then();
    // Key at current probe matches. Relevant bits in the |details| field must
    // be zero, otherwise the dictionary element requires special handling.
    HValue* details_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 2));
    details_index->ClearFlag(HValue::kCanOverflow);
    HValue* details = Add<HLoadKeyed>(elements, details_index, nullptr, nullptr,
                                      FAST_ELEMENTS);
    int details_mask = PropertyDetails::TypeField::kMask;
    details = AddUncasted<HBitwise>(Token::BIT_AND, details,
                                    Add<HConstant>(details_mask));
    IfBuilder details_compare(this);
    details_compare.If<HCompareNumericAndBranch>(
        details, graph()->GetConstant0(), Token::EQ);
    details_compare.Then();
    // Plain data property: the value sits right after the key in the entry.
    HValue* result_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 1));
    result_index->ClearFlag(HValue::kCanOverflow);
    Push(Add<HLoadKeyed>(elements, result_index, nullptr, nullptr,
                         FAST_ELEMENTS));
    details_compare.Else();
    // Non-plain details (e.g. accessors): fall back to the runtime.
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(
        Runtime::FunctionForId(is_strong(language_mode)
                                   ? Runtime::kKeyedGetPropertyStrong
                                   : Runtime::kKeyedGetProperty),
        2));
    details_compare.End();

    found_key_match.Else();
    found_key_match.JoinContinuation(&return_or_loop_continuation);
  }
  if_undefined.JoinContinuation(&return_or_loop_continuation);

  IfBuilder return_or_loop(this, &return_or_loop_continuation);
  return_or_loop.Then();
  probe_loop.Break();

  return_or_loop.Else();
  // Not found at this entry: advance to the next probe position
  // (entry += count; count += 1).
  entry = AddUncasted<HAdd>(entry, count);
  entry->ClearFlag(HValue::kCanOverflow);
  count = AddUncasted<HAdd>(count, graph()->GetConstant1());
  count->ClearFlag(HValue::kCanOverflow);
  Push(entry);
  Push(count);

  probe_loop.EndBody();

  return_or_loop.End();

  return Pop();
}


// Allocate and initialize a JSIteratorResult {value, done} object.
HValue* HGraphBuilder::BuildCreateIterResultObject(HValue* value,
                                                   HValue* done) {
  NoObservableSideEffectsScope scope(this);

  // Allocate the JSIteratorResult object.
  HValue* result =
      Add<HAllocate>(Add<HConstant>(JSIteratorResult::kSize), HType::JSObject(),
                     NOT_TENURED, JS_ITERATOR_RESULT_TYPE);

  // Initialize the JSIteratorResult object.
  // Fetch the iterator-result map from the native context and finish the
  // freshly allocated object: map, empty properties/elements, then the
  // {value, done} payload.
  HValue* native_context = BuildGetNativeContext();
  HValue* map = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::ITERATOR_RESULT_MAP_INDEX));
  Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
  HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
  Add<HStoreNamedField>(result, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
                                    JSIteratorResult::kValueOffset),
                        value);
  Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
                                    JSIteratorResult::kDoneOffset),
                        done);
  // Guard against new in-object fields being added to JSIteratorResult
  // without this initialization code being updated.
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  return result;
}


// Builds a JSRegExpResult (a JSArray with additional "index" and "input"
// in-object fields) of the given {length}, backed by a freshly allocated
// FAST_ELEMENTS store pre-filled with undefined. Deoptimizes (via the bounds
// check) if {length} exceeds the maximal fast-array allocation size.
HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
                                                  HValue* index,
                                                  HValue* input) {
  NoObservableSideEffectsScope scope(this);
  HConstant* max_length = Add<HConstant>(JSArray::kInitialMaxFastElementArray);
  Add<HBoundsCheck>(length, max_length);

  // Generate size calculation code here in order to make it dominate
  // the JSRegExpResult allocation.
  ElementsKind elements_kind = FAST_ELEMENTS;
  HValue* size = BuildCalculateElementsSize(elements_kind, length);

  // Allocate the JSRegExpResult and the FixedArray in one step.
  HValue* result = Add<HAllocate>(
      Add<HConstant>(JSRegExpResult::kSize), HType::JSArray(),
      NOT_TENURED, JS_ARRAY_TYPE);

  // Initialize the JSRegExpResult header.
  HValue* native_context = Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
  Add<HStoreNamedField>(
      result, HObjectAccess::ForMap(),
      Add<HLoadNamedField>(
          native_context, nullptr,
          HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length);

  // Initialize the additional fields.
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
      index);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
      input);

  // Allocate and initialize the elements header.
  HAllocate* elements = BuildAllocateElements(elements_kind, size);
  BuildInitializeElementsHeader(elements, elements_kind, length);

  // If the dynamically computed size carries no static upper bound,
  // establish one from the maximal length checked above.
  if (!elements->has_size_upper_bound()) {
    HConstant* size_in_bytes_upper_bound = EstablishElementsAllocationSize(
        elements_kind, max_length->Integer32Value());
    elements->set_size_upper_bound(size_in_bytes_upper_bound);
  }

  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      elements);

  // Initialize the elements contents with undefined.
  BuildFillElementsWithValue(
      elements, elements_kind, graph()->GetConstant0(), length,
      graph()->GetConstantUndefined());

  return result;
}


// Converts the number {object} to its string representation: folds constant
// numbers at compile time, otherwise probes the number-string cache and
// falls back to %NumberToStringSkipCache on a cache miss. {type} narrows
// the cases emitted (SignedSmall/Number feedback turns impossible paths
// into deopts).
HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
  NoObservableSideEffectsScope scope(this);

  // Convert constant numbers at compile time.
  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
    Handle<Object> number = HConstant::cast(object)->handle(isolate());
    Handle<String> result = isolate()->factory()->NumberToString(number);
    return Add<HConstant>(result);
  }

  // Create a joinable continuation.
  HIfContinuation found(graph()->CreateBasicBlock(),
                        graph()->CreateBasicBlock());

  // Load the number string cache.
  HValue* number_string_cache =
      Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  HValue* mask = AddLoadFixedArrayLength(number_string_cache);
  mask->set_type(HType::Smi());
  mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
  mask = AddUncasted<HSub>(mask, graph()->GetConstant1());

  // Check whether object is a smi.
  IfBuilder if_objectissmi(this);
  if_objectissmi.If<HIsSmiAndBranch>(object);
  if_objectissmi.Then();
  {
    // Compute hash for smi similar to smi_get_hash().
    HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);

    // Load the key.
    HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
    HValue* key = Add<HLoadKeyed>(number_string_cache, key_index, nullptr,
                                  nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);

    // Check if object == key.
    IfBuilder if_objectiskey(this);
    if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
    if_objectiskey.Then();
    {
      // Make the key_index available.
      Push(key_index);
    }
    if_objectiskey.JoinContinuation(&found);
  }
  if_objectissmi.Else();
  {
    if (type->Is(Type::SignedSmall())) {
      // Feedback says the input is always a smi, so a non-smi here means
      // the feedback is stale: deoptimize.
      if_objectissmi.Deopt(Deoptimizer::kExpectedSmi);
    } else {
      // Check if the object is a heap number.
      IfBuilder if_objectisnumber(this);
      HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
          object, isolate()->factory()->heap_number_map());
      if_objectisnumber.Then();
      {
        // Compute hash for heap number similar to double_get_hash().
        HValue* low = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueLowestBits());
        HValue* high = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueHighestBits());
        HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
        hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);

        // Load the key.
        HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
        HValue* key =
            Add<HLoadKeyed>(number_string_cache, key_index, nullptr, nullptr,
                            FAST_ELEMENTS, ALLOW_RETURN_HOLE);

        // Check if the key is a heap number and compare it with the object.
        IfBuilder if_keyisnotsmi(this);
        HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
        if_keyisnotsmi.Then();
        {
          IfBuilder if_keyisheapnumber(this);
          if_keyisheapnumber.If<HCompareMap>(
              key, isolate()->factory()->heap_number_map());
          if_keyisheapnumber.Then();
          {
            // Check if values of key and object match.
            IfBuilder if_keyeqobject(this);
            if_keyeqobject.If<HCompareNumericAndBranch>(
                Add<HLoadNamedField>(key, keyisnotsmi,
                                     HObjectAccess::ForHeapNumberValue()),
                Add<HLoadNamedField>(object, objectisnumber,
                                     HObjectAccess::ForHeapNumberValue()),
                Token::EQ);
            if_keyeqobject.Then();
            {
              // Make the key_index available.
              Push(key_index);
            }
            if_keyeqobject.JoinContinuation(&found);
          }
          if_keyisheapnumber.JoinContinuation(&found);
        }
        if_keyisnotsmi.JoinContinuation(&found);
      }
      if_objectisnumber.Else();
      {
        if (type->Is(Type::Number())) {
          // Feedback says the input is always a number; anything else means
          // stale feedback: deoptimize.
          if_objectisnumber.Deopt(Deoptimizer::kExpectedHeapNumber);
        }
      }
      if_objectisnumber.JoinContinuation(&found);
    }
  }
  if_objectissmi.JoinContinuation(&found);

  // Check for cache hit. On the "found" path the matching key_index was
  // pushed above; the cached string lives in the slot right after the key.
  IfBuilder if_found(this, &found);
  if_found.Then();
  {
    // Count number to string operation in native code.
    AddIncrementCounter(isolate()->counters()->number_to_string_native());

    // Load the value in case of cache hit.
    HValue* key_index = Pop();
    HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
    Push(Add<HLoadKeyed>(number_string_cache, value_index, nullptr, nullptr,
                         FAST_ELEMENTS, ALLOW_RETURN_HOLE));
  }
  if_found.Else();
  {
    // Cache miss, fallback to runtime.
    Add<HPushArguments>(object);
    Push(Add<HCallRuntime>(
            Runtime::FunctionForId(Runtime::kNumberToStringSkipCache),
            1));
  }
  if_found.End();

  return Pop();
}


// Implements ToObject for {receiver}: JSReceivers are returned unchanged;
// primitives are wrapped into a JSValue whose map comes from the matching
// global constructor function; undefined and null deoptimize.
HValue* HGraphBuilder::BuildToObject(HValue* receiver) {
  NoObservableSideEffectsScope scope(this);

  // Create a joinable continuation.
  HIfContinuation wrap(graph()->CreateBasicBlock(),
                       graph()->CreateBasicBlock());

  // Determine the proper global constructor function required to wrap
  // {receiver} into a JSValue, unless {receiver} is already a {JSReceiver}, in
  // which case we just return it. Deopts to Runtime::kToObject if {receiver}
  // is undefined or null.
  IfBuilder receiver_is_smi(this);
  receiver_is_smi.If<HIsSmiAndBranch>(receiver);
  receiver_is_smi.Then();
  {
    // Use global Number function.
    Push(Add<HConstant>(Context::NUMBER_FUNCTION_INDEX));
  }
  receiver_is_smi.Else();
  {
    // Determine {receiver} map and instance type.
    HValue* receiver_map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
    HValue* receiver_instance_type = Add<HLoadNamedField>(
        receiver_map, nullptr, HObjectAccess::ForMapInstanceType());

    // First check whether {receiver} is already a spec object (fast case).
    IfBuilder receiver_is_not_spec_object(this);
    receiver_is_not_spec_object.If<HCompareNumericAndBranch>(
        receiver_instance_type, Add<HConstant>(FIRST_JS_RECEIVER_TYPE),
        Token::LT);
    receiver_is_not_spec_object.Then();
    {
      // Load the constructor function index from the {receiver} map.
      HValue* constructor_function_index = Add<HLoadNamedField>(
          receiver_map, nullptr,
          HObjectAccess::ForMapInObjectPropertiesOrConstructorFunctionIndex());

      // Check if {receiver} has a constructor (null and undefined have no
      // constructors, so we deoptimize to the runtime to throw an exception).
      IfBuilder constructor_function_index_is_invalid(this);
      constructor_function_index_is_invalid.If<HCompareNumericAndBranch>(
          constructor_function_index,
          Add<HConstant>(Map::kNoConstructorFunctionIndex), Token::EQ);
      constructor_function_index_is_invalid.ThenDeopt(
          Deoptimizer::kUndefinedOrNullInToObject);
      constructor_function_index_is_invalid.End();

      // Use the global constructor function.
      Push(constructor_function_index);
    }
    receiver_is_not_spec_object.JoinContinuation(&wrap);
  }
  receiver_is_smi.JoinContinuation(&wrap);

  // Wrap the receiver if necessary. The "wrap" path has the native-context
  // index of the constructor function on the environment stack.
  IfBuilder if_wrap(this, &wrap);
  if_wrap.Then();
  {
    // Grab the constructor function index.
    HValue* constructor_index = Pop();

    // Load native context.
    HValue* native_context = BuildGetNativeContext();

    // Determine the initial map for the global constructor.
    HValue* constructor = Add<HLoadKeyed>(native_context, constructor_index,
                                          nullptr, nullptr, FAST_ELEMENTS);
    HValue* constructor_initial_map = Add<HLoadNamedField>(
        constructor, nullptr, HObjectAccess::ForPrototypeOrInitialMap());
    // Allocate and initialize a JSValue wrapper.
    HValue* value =
        BuildAllocate(Add<HConstant>(JSValue::kSize), HType::JSObject(),
                      JS_VALUE_TYPE, HAllocationMode());
    Add<HStoreNamedField>(value, HObjectAccess::ForMap(),
                          constructor_initial_map);
    HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
    Add<HStoreNamedField>(value, HObjectAccess::ForPropertiesPointer(),
                          empty_fixed_array);
    Add<HStoreNamedField>(value, HObjectAccess::ForElementsPointer(),
                          empty_fixed_array);
    Add<HStoreNamedField>(value, HObjectAccess::ForObservableJSObjectOffset(
                                     JSValue::kValueOffset),
                          receiver);
    Push(value);
  }
  if_wrap.Else();
  { Push(receiver); }
  if_wrap.End();
  return Pop();
}


// Allocates an object of {object_size} bytes with the given {type} and
// {instance_type}. When {allocation_mode} requests allocation mementos, the
// memento is allocated (and initialized) directly behind the object and the
// memento size is folded into the allocation.
HAllocate* HGraphBuilder::BuildAllocate(
    HValue* object_size,
    HType type,
    InstanceType instance_type,
    HAllocationMode allocation_mode) {
  // Compute the effective allocation size.
  HValue* size = object_size;
  if (allocation_mode.CreateAllocationMementos()) {
    size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
    size->ClearFlag(HValue::kCanOverflow);
  }

  // Perform the actual allocation.
  HAllocate* object = Add<HAllocate>(
      size, type, allocation_mode.GetPretenureMode(),
      instance_type, allocation_mode.feedback_site());

  // Setup the allocation memento.
  if (allocation_mode.CreateAllocationMementos()) {
    BuildCreateAllocationMemento(
        object, object_size, allocation_mode.current_site());
  }

  return object;
}


// Returns {left_length} + {right_length}, deoptimizing (via the bounds
// check) when the sum exceeds String::kMaxLength.
HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
                                             HValue* right_length) {
  // Compute the combined string length and check against max string length.
  HValue* length = AddUncasted<HAdd>(left_length, right_length);
  // Check that length <= kMaxLength <=> length < MaxLength + 1.
  HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
  Add<HBoundsCheck>(length, max_length);
  return length;
}


// Allocates a ConsString of {length} referencing {left} and {right},
// selecting the one-byte or two-byte cons string map from the operands'
// instance types.
HValue* HGraphBuilder::BuildCreateConsString(
    HValue* length,
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string instance types.
  HInstruction* left_instance_type = AddLoadStringInstanceType(left);
  HInstruction* right_instance_type = AddLoadStringInstanceType(right);

  // Allocate the cons string object. HAllocate does not care whether we
  // pass CONS_STRING_TYPE or CONS_ONE_BYTE_STRING_TYPE here, so we just use
  // CONS_STRING_TYPE here. Below we decide whether the cons string is
  // one-byte or two-byte and set the appropriate map.
  DCHECK(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
                                            CONS_ONE_BYTE_STRING_TYPE));
  HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
                                    HType::String(), CONS_STRING_TYPE,
                                    allocation_mode);

  // Compute intersection and difference of instance types.
  HValue* anded_instance_types = AddUncasted<HBitwise>(
      Token::BIT_AND, left_instance_type, right_instance_type);
  HValue* xored_instance_types = AddUncasted<HBitwise>(
      Token::BIT_XOR, left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
  IfBuilder if_onebyte(this);
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kOneByteDataHintMask != 0);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, anded_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kStringEncodingMask | kOneByteDataHintMask))),
      graph()->GetConstant0(), Token::NE);
  if_onebyte.Or();
  STATIC_ASSERT(kOneByteStringTag != 0 &&
                kOneByteDataHintTag != 0 &&
                kOneByteDataHintTag != kOneByteStringTag);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, xored_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kOneByteStringTag | kOneByteDataHintTag))),
      Add<HConstant>(static_cast<int32_t>(
              kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
  if_onebyte.Then();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_one_byte_string_map()));
  }
  if_onebyte.Else();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_string_map()));
  }
  if_onebyte.End();

  // Initialize the cons string fields.
  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                        Add<HConstant>(String::kEmptyHashField));
  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);

  // Count the native string addition.
  AddIncrementCounter(isolate()->counters()->string_add_native());

  return result;
}


// Emits a loop copying {length} characters from sequential string {src}
// (starting at {src_offset}) into sequential string {dst} (starting at
// {dst_offset}), one character per iteration.
void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
                                            HValue* src_offset,
                                            String::Encoding src_encoding,
                                            HValue* dst,
                                            HValue* dst_offset,
                                            String::Encoding dst_encoding,
                                            HValue* length) {
  // Copying two-byte characters into a one-byte destination would lose data.
  DCHECK(dst_encoding != String::ONE_BYTE_ENCODING ||
         src_encoding == String::ONE_BYTE_ENCODING);
  LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
  HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
  {
    HValue* src_index = AddUncasted<HAdd>(src_offset, index);
    HValue* value =
        AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
    HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
    Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
  }
  loop.EndBody();
}


// Rounds {header_size} + {unaligned_size} up to the next object-alignment
// boundary (add the mask, then clear the low bits).
HValue* HGraphBuilder::BuildObjectSizeAlignment(
    HValue* unaligned_size, int header_size) {
  DCHECK((header_size & kObjectAlignmentMask) == 0);
  HValue* size = AddUncasted<HAdd>(
      unaligned_size, Add<HConstant>(static_cast<int32_t>(
          header_size + kObjectAlignmentMask)));
  size->ClearFlag(HValue::kCanOverflow);
  return AddUncasted<HBitwise>(
      Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
          ~kObjectAlignmentMask)));
}


// Concatenates two strings that are both known to be non-empty: produces a
// ConsString for results of at least ConsString::kMinLength, otherwise a
// flat sequential string (copying both operands), with runtime fallbacks
// for non-sequential/mixed-encoding inputs and large-object results.
HValue* HGraphBuilder::BuildUncheckedStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Compute the combined string length.
  HValue* length = BuildAddStringLengths(left_length, right_length);

  // Do some manual constant folding here.
  // If one side has a constant length, the other side (non-empty by this
  // function's precondition) contributes at least one more character, so
  // the cons-string threshold can sometimes be decided statically.
  if (left_length->IsConstant()) {
    HConstant* c_left_length = HConstant::cast(left_length);
    DCHECK_NE(0, c_left_length->Integer32Value());
    if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The right string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  } else if (right_length->IsConstant()) {
    HConstant* c_right_length = HConstant::cast(right_length);
    DCHECK_NE(0, c_right_length->Integer32Value());
    if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The left string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  }

  // Check if we should create a cons string.
  IfBuilder if_createcons(this);
  if_createcons.If<HCompareNumericAndBranch>(
      length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
  if_createcons.Then();
  {
    // Create a cons string.
    Push(BuildCreateConsString(length, left, right, allocation_mode));
  }
  if_createcons.Else();
  {
    // Determine the string instance types.
    HValue* left_instance_type = AddLoadStringInstanceType(left);
    HValue* right_instance_type = AddLoadStringInstanceType(right);

    // Compute union and difference of instance types.
    HValue* ored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_OR, left_instance_type, right_instance_type);
    HValue* xored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_XOR, left_instance_type, right_instance_type);

    // Check if both strings have the same encoding and both are
    // sequential.
    IfBuilder if_sameencodingandsequential(this);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, xored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.And();
    STATIC_ASSERT(kSeqStringTag == 0);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, ored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.Then();
    {
      HConstant* string_map =
          Add<HConstant>(isolate()->factory()->string_map());
      HConstant* one_byte_string_map =
          Add<HConstant>(isolate()->factory()->one_byte_string_map());

      // Determine map and size depending on whether result is one-byte string.
      IfBuilder if_onebyte(this);
      STATIC_ASSERT(kOneByteStringTag != 0);
      if_onebyte.If<HCompareNumericAndBranch>(
          AddUncasted<HBitwise>(
              Token::BIT_AND, ored_instance_types,
              Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
          graph()->GetConstant0(), Token::NE);
      if_onebyte.Then();
      {
        // Allocate sequential one-byte string object.
        Push(length);
        Push(one_byte_string_map);
      }
      if_onebyte.Else();
      {
        // Allocate sequential two-byte string object.
        HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
        size->ClearFlag(HValue::kCanOverflow);
        size->SetFlag(HValue::kUint32);
        Push(size);
        Push(string_map);
      }
      if_onebyte.End();
      HValue* map = Pop();

      // Calculate the number of bytes needed for the characters in the
      // string while observing object alignment.
      STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
      HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);

      IfBuilder if_size(this);
      if_size.If<HCompareNumericAndBranch>(
          size, Add<HConstant>(Page::kMaxRegularHeapObjectSize), Token::LT);
      if_size.Then();
      {
        // Allocate the string object. HAllocate does not care whether we pass
        // STRING_TYPE or ONE_BYTE_STRING_TYPE here, so we just use STRING_TYPE.
        HAllocate* result =
            BuildAllocate(size, HType::String(), STRING_TYPE, allocation_mode);
        Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);

        // Initialize the string fields.
        Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                              Add<HConstant>(String::kEmptyHashField));
        Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);

        // Copy characters to the result string.
        IfBuilder if_twobyte(this);
        if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
        if_twobyte.Then();
        {
          // Copy characters from the left string.
          BuildCopySeqStringChars(
              left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
              graph()->GetConstant0(), String::TWO_BYTE_ENCODING, left_length);

          // Copy characters from the right string.
          BuildCopySeqStringChars(
              right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
              left_length, String::TWO_BYTE_ENCODING, right_length);
        }
        if_twobyte.Else();
        {
          // Copy characters from the left string.
          BuildCopySeqStringChars(
              left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
              graph()->GetConstant0(), String::ONE_BYTE_ENCODING, left_length);

          // Copy characters from the right string.
          BuildCopySeqStringChars(
              right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
              left_length, String::ONE_BYTE_ENCODING, right_length);
        }
        if_twobyte.End();

        // Count the native string addition.
        AddIncrementCounter(isolate()->counters()->string_add_native());

        // Return the sequential string.
        Push(result);
      }
      if_size.Else();
      {
        // Fallback to the runtime to add the two strings. The string has to be
        // allocated in LO space.
        Add<HPushArguments>(left, right);
        Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
      }
      if_size.End();
    }
    if_sameencodingandsequential.Else();
    {
      // Fallback to the runtime to add the two strings.
      Add<HPushArguments>(left, right);
      Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
    }
    if_sameencodingandsequential.End();
  }
  if_createcons.End();

  return Pop();
}


// Concatenates {left} and {right}, short-circuiting when either operand is
// the empty string (in which case the other operand is returned as-is).
HValue* HGraphBuilder::BuildStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  NoObservableSideEffectsScope no_effects(this);

  // Determine string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Check if left string is empty.
  IfBuilder if_leftempty(this);
  if_leftempty.If<HCompareNumericAndBranch>(
      left_length, graph()->GetConstant0(), Token::EQ);
  if_leftempty.Then();
  {
    // Count the native string addition.
    AddIncrementCounter(isolate()->counters()->string_add_native());

    // Just return the right string.
    Push(right);
  }
  if_leftempty.Else();
  {
    // Check if right string is empty.
    IfBuilder if_rightempty(this);
    if_rightempty.If<HCompareNumericAndBranch>(
        right_length, graph()->GetConstant0(), Token::EQ);
    if_rightempty.Then();
    {
      // Count the native string addition.
      AddIncrementCounter(isolate()->counters()->string_add_native());

      // Just return the left string.
      Push(left);
    }
    if_rightempty.Else();
    {
      // Add the two non-empty strings.
      Push(BuildUncheckedStringAdd(left, right, allocation_mode));
    }
    if_rightempty.End();
  }
  if_leftempty.End();

  return Pop();
}


// Emits a keyed element load or store on {checked_object}, whose map has
// already been checked (see the DCHECK below). Handles fixed typed arrays,
// COW backing-store copying, and growing stores, depending on
// {elements_kind}, {access_type} and {store_mode}.
HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
    HValue* checked_object,
    HValue* key,
    HValue* val,
    bool is_js_array,
    ElementsKind elements_kind,
    PropertyAccessType access_type,
    LoadKeyedHoleMode load_mode,
    KeyedAccessStoreMode store_mode) {
  DCHECK(top_info()->IsStub() || checked_object->IsCompareMap() ||
         checked_object->IsCheckMaps());
  DCHECK(!IsFixedTypedArrayElementsKind(elements_kind) || !is_js_array);
  // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
  // on a HElementsTransition instruction. The flag can also be removed if the
  // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
  // ElementsKind transitions. Finally, the dependency can be removed for stores
  // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
  // generated store code.
  if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
      (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
    checked_object->ClearDependsOnFlag(kElementsKind);
  }

  bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
  bool fast_elements = IsFastObjectElementsKind(elements_kind);
  HValue* elements = AddLoadElements(checked_object);
  // For fast-elements stores (unless COW is handled explicitly below),
  // check that the backing store is a plain FixedArray, i.e. not COW.
  if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
    HCheckMaps* check_cow_map = Add<HCheckMaps>(
        elements, isolate()->factory()->fixed_array_map());
    check_cow_map->ClearDependsOnFlag(kElementsKind);
  }
  HInstruction* length = NULL;
  if (is_js_array) {
    length = Add<HLoadNamedField>(
        checked_object->ActualValue(), checked_object,
        HObjectAccess::ForArrayLength(elements_kind));
  } else {
    length = AddLoadFixedArrayLength(elements);
  }
  length->set_type(HType::Smi());
  HValue* checked_key = NULL;
  if (IsFixedTypedArrayElementsKind(elements_kind)) {
    checked_object = Add<HCheckArrayBufferNotNeutered>(checked_object);

    HValue* external_pointer = Add<HLoadNamedField>(
        elements, nullptr,
        HObjectAccess::ForFixedTypedArrayBaseExternalPointer());
    HValue* base_pointer = Add<HLoadNamedField>(
        elements, nullptr, HObjectAccess::ForFixedTypedArrayBaseBasePointer());
    HValue* backing_store = AddUncasted<HAdd>(
        external_pointer, base_pointer, Strength::WEAK, AddOfExternalAndTagged);

    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      // Out-of-bounds keys (>= length) fall through without performing the
      // access; negative keys deoptimize.
      NoObservableSideEffectsScope no_effects(this);
      IfBuilder length_checker(this);
      length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
      length_checker.Then();
      IfBuilder negative_checker(this);
      HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
          key, graph()->GetConstant0(), Token::GTE);
      negative_checker.Then();
      HInstruction* result = AddElementAccess(
          backing_store, key, val, bounds_check, checked_object->ActualValue(),
          elements_kind, access_type);
      negative_checker.ElseDeopt(Deoptimizer::kNegativeKeyEncountered);
      negative_checker.End();
      length_checker.End();
      return result;
    } else {
      DCHECK(store_mode == STANDARD_STORE);
      checked_key = Add<HBoundsCheck>(key, length);
      return AddElementAccess(backing_store, checked_key, val, checked_object,
                              checked_object->ActualValue(), elements_kind,
                              access_type);
    }
  }
  DCHECK(fast_smi_only_elements ||
         fast_elements ||
         IsFastDoubleElementsKind(elements_kind));

  // In case val is stored into a fast smi array, assure that the value is a smi
  // before manipulating the backing store. Otherwise the actual store may
  // deopt, leaving the backing store in an invalid state.
  if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
      !val->type().IsSmi()) {
    val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
  }

  if (IsGrowStoreMode(store_mode)) {
    NoObservableSideEffectsScope no_effects(this);
    Representation representation = HStoreKeyed::RequiredValueRepresentation(
        elements_kind, STORE_TO_INITIALIZED_ENTRY);
    val = AddUncasted<HForceRepresentation>(val, representation);
    elements = BuildCheckForCapacityGrow(checked_object, elements,
                                         elements_kind, length, key,
                                         is_js_array, access_type);
    // The grow path performs its own bounds handling; use the raw key.
    checked_key = key;
  } else {
    checked_key = Add<HBoundsCheck>(key, length);

    if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
      if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
        NoObservableSideEffectsScope no_effects(this);
        elements = BuildCopyElementsOnWrite(checked_object, elements,
                                            elements_kind, length);
      } else {
        HCheckMaps* check_cow_map = Add<HCheckMaps>(
            elements,
            isolate()->factory()->fixed_array_map());
        check_cow_map->ClearDependsOnFlag(kElementsKind);
      }
    }
  }
  return AddElementAccess(elements, checked_key, val, checked_object, nullptr,
                          elements_kind, access_type, load_mode);
}


// Allocates a JSArray for {length_argument} using {array_builder}. Constant
// smi lengths are folded at compile time; otherwise the length is bounds
// checked and the code branches between an empty preallocated-capacity
// array (length 0) and one sized by the checked length. For packed-kind
// feedback outside stubs, a non-zero dynamic length deoptimizes so better
// (holey) feedback can be collected.
HValue* HGraphBuilder::BuildAllocateArrayFromLength(
    JSArrayBuilder* array_builder,
    HValue* length_argument) {
  if (length_argument->IsConstant() &&
      HConstant::cast(length_argument)->HasSmiValue()) {
    int array_length = HConstant::cast(length_argument)->Integer32Value();
    if (array_length == 0) {
      return array_builder->AllocateEmptyArray();
    } else {
      return array_builder->AllocateArray(length_argument,
                                          array_length,
                                          length_argument);
    }
  }

  HValue* constant_zero = graph()->GetConstant0();
  HConstant* max_alloc_length =
      Add<HConstant>(JSArray::kInitialMaxFastElementArray);
  HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
                                                   max_alloc_length);
  IfBuilder if_builder(this);
  if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
                                          Token::EQ);
  if_builder.Then();
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
  Push(initial_capacity_node);  // capacity
  Push(constant_zero);  // length
  if_builder.Else();
  if (!(top_info()->IsStub()) &&
      IsFastPackedElementsKind(array_builder->kind())) {
    // We'll come back later with better (holey) feedback.
    if_builder.Deopt(
        Deoptimizer::kHoleyArrayDespitePackedElements_kindFeedback);
  } else {
    Push(checked_length);  // capacity
    Push(checked_length);  // length
  }
  if_builder.End();

  // Figure out total size
  HValue* length = Pop();
  HValue* capacity = Pop();
  return array_builder->AllocateArray(capacity, max_alloc_length, length);
}


// Computes the byte size of an elements backing store holding {capacity}
// entries of {kind}: header size plus capacity times the element size
// (kDoubleSize for double kinds, kPointerSize otherwise).
HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind,
                                                  HValue* capacity) {
  int elements_size = IsFastDoubleElementsKind(kind)
      ? kDoubleSize
      : kPointerSize;

  HConstant* elements_size_value = Add<HConstant>(elements_size);
  HInstruction* mul =
      HMul::NewImul(isolate(), zone(), context(), capacity->ActualValue(),
                    elements_size_value);
  AddInstruction(mul);
  mul->ClearFlag(HValue::kCanOverflow);

  STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);

  HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
  HValue* total_size = AddUncasted<HAdd>(mul, header_size);
  total_size->ClearFlag(HValue::kCanOverflow);
  return total_size;
}


// Allocates the (uninitialized) space for a JSArray object, including room
// for an AllocationMemento when {mode} tracks allocation sites.
HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) {
  int base_size = JSArray::kSize;
  if (mode == TRACK_ALLOCATION_SITE) {
    base_size += AllocationMemento::kSize;
  }
  HConstant* size_in_bytes = Add<HConstant>(base_size);
  return Add<HAllocate>(
      size_in_bytes, HType::JSArray(), NOT_TENURED, JS_OBJECT_TYPE);
}


// Returns the byte size of an elements store with the given constant
// {capacity} as an HConstant.
HConstant* HGraphBuilder::EstablishElementsAllocationSize(
    ElementsKind kind,
    int capacity) {
  int base_size = IsFastDoubleElementsKind(kind)
      ?
          FixedDoubleArray::SizeFor(capacity)
      : FixedArray::SizeFor(capacity);

  return Add<HConstant>(base_size);
}


// Allocates a FixedArray or FixedDoubleArray (depending on {kind}) of
// {size_in_bytes}; the header is not initialized here.
HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
                                                HValue* size_in_bytes) {
  InstanceType instance_type = IsFastDoubleElementsKind(kind)
      ? FIXED_DOUBLE_ARRAY_TYPE
      : FIXED_ARRAY_TYPE;

  return Add<HAllocate>(size_in_bytes, HType::HeapObject(), NOT_TENURED,
                        instance_type);
}


// Writes the map and length fields of a freshly allocated elements backing
// store, selecting the fixed(-double)-array map from {kind}.
void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
                                                  ElementsKind kind,
                                                  HValue* capacity) {
  Factory* factory = isolate()->factory();
  Handle<Map> map = IsFastDoubleElementsKind(kind)
      ? factory->fixed_double_array_map()
      : factory->fixed_array_map();

  Add<HStoreNamedField>(elements, HObjectAccess::ForMap(), Add<HConstant>(map));
  Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
                        capacity);
}


// Allocates an elements backing store for {capacity} entries of {kind} and
// initializes its header (map and length).
HValue* HGraphBuilder::BuildAllocateAndInitializeArray(ElementsKind kind,
                                                       HValue* capacity) {
  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
  capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
  HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity);
  HValue* new_array = BuildAllocateElements(kind, size_in_bytes);
  BuildInitializeElementsHeader(new_array, kind, capacity);
  return new_array;
}


// Initializes the header of a freshly allocated JSArray: map, empty
// properties, elements pointer (falling back to the empty fixed array when
// {elements} is NULL), length, and optionally an AllocationMemento.
void HGraphBuilder::BuildJSArrayHeader(HValue* array,
                                       HValue* array_map,
                                       HValue* elements,
                                       AllocationSiteMode mode,
                                       ElementsKind elements_kind,
                                       HValue* allocation_site_payload,
                                       HValue* length_field) {
  Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);

  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());

  Add<HStoreNamedField>(
      array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array);

  Add<HStoreNamedField>(
      array, HObjectAccess::ForElementsPointer(),
      elements != NULL ? elements : empty_fixed_array);

  Add<HStoreNamedField>(
      array, HObjectAccess::ForArrayLength(elements_kind), length_field);

  if (mode == TRACK_ALLOCATION_SITE) {
    BuildCreateAllocationMemento(
        array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
  }
}


// Emits the actual keyed load or store instruction. Stored values are
// clamped for UINT8_CLAMPED_ELEMENTS; loads of UINT32_ELEMENTS are recorded
// for later uint32 analysis.
HInstruction* HGraphBuilder::AddElementAccess(
    HValue* elements, HValue* checked_key, HValue* val, HValue* dependency,
    HValue* backing_store_owner, ElementsKind elements_kind,
    PropertyAccessType access_type, LoadKeyedHoleMode load_mode) {
  if (access_type == STORE) {
    DCHECK(val != NULL);
    if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
      val = Add<HClampToUint8>(val);
    }
    return Add<HStoreKeyed>(elements, checked_key, val, backing_store_owner,
                            elements_kind, STORE_TO_INITIALIZED_ENTRY);
  }

  DCHECK(access_type == LOAD);
  DCHECK(val == NULL);
  HLoadKeyed* load =
      Add<HLoadKeyed>(elements, checked_key, dependency, backing_store_owner,
                      elements_kind,
                      load_mode);
  // uint32 loads are recorded so the uint32 analysis phase can decide
  // whether they may stay in uint32 representation.
  if (elements_kind == UINT32_ELEMENTS) {
    graph()->RecordUint32Instruction(load);
  }
  return load;
}


// Loads the map of |object|; |dependency| lets GVN tie the load to a prior
// check on the same object.
HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object,
                                           HValue* dependency) {
  return Add<HLoadNamedField>(object, dependency, HObjectAccess::ForMap());
}


// Loads the elements pointer of |object|.
HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
                                                HValue* dependency) {
  return Add<HLoadNamedField>(
      object, dependency, HObjectAccess::ForElementsPointer());
}


// Loads the length field of a FixedArray-style backing store.
HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
    HValue* array,
    HValue* dependency) {
  return Add<HLoadNamedField>(
      array, dependency, HObjectAccess::ForFixedArrayLength());
}


// Loads the JSArray length field, with the representation implied by the
// elements |kind|.
HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
                                                   ElementsKind kind,
                                                   HValue* dependency) {
  return Add<HLoadNamedField>(
      array, dependency, HObjectAccess::ForArrayLength(kind));
}


// Computes the grown capacity for an elements store:
// new_capacity = old_capacity + old_capacity/2 + 16.
HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
  HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
                                                graph_->GetConstant1());

  HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
  new_capacity->ClearFlag(HValue::kCanOverflow);

  HValue* min_growth = Add<HConstant>(16);

  new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
  new_capacity->ClearFlag(HValue::kCanOverflow);

  return new_capacity;
}


// Grows |object|'s backing store to |new_capacity| (possibly transitioning
// elements kind), copies the first |length| elements across, and installs
// the new store on the object. The bounds check rejects capacities whose
// store would exceed the regular-heap-object size limit.
HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
                                                  HValue* elements,
                                                  ElementsKind kind,
                                                  ElementsKind new_kind,
                                                  HValue* length,
                                                  HValue* new_capacity) {
  Add<HBoundsCheck>(new_capacity, Add<HConstant>(
      (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
      ElementsKindToShiftSize(new_kind)));

  HValue* new_elements =
      BuildAllocateAndInitializeArray(new_kind, new_capacity);

  BuildCopyElements(elements, kind, new_elements,
                    new_kind, length, new_capacity);

  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                        new_elements);

  return new_elements;
}


// Stores |value| into element slots [from, to) of |elements|. When both
// bounds are small int constants the loop is fully unrolled; otherwise a
// backwards loop is emitted (rationale in the comment below). A NULL |to|
// means "up to the store's length field".
void HGraphBuilder::BuildFillElementsWithValue(HValue* elements,
                                               ElementsKind elements_kind,
                                               HValue* from,
                                               HValue* to,
                                               HValue* value) {
  if (to == NULL) {
    to = AddLoadFixedArrayLength(elements);
  }

  // Special loop unfolding case
  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
                kElementLoopUnrollThreshold);
  int initial_capacity = -1;
  if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
    int constant_from = from->GetInteger32Constant();
    int constant_to = to->GetInteger32Constant();

    if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
      initial_capacity = constant_to;
    }
  }

  if (initial_capacity >= 0) {
    for (int i = 0; i < initial_capacity; i++) {
      HInstruction* key = Add<HConstant>(i);
      Add<HStoreKeyed>(elements, key, value, nullptr, elements_kind);
    }
  } else {
    // Carefully loop backwards so that the "from" remains live through the loop
    // rather than the to. This often corresponds to keeping length live rather
    // then capacity, which helps register allocation, since length is used more
    // other than capacity after filling with holes.
    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(to, from, Token::GT);

    // The loop counter runs (to, from]; subtract one to get the slot index.
    HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
    adjusted_key->ClearFlag(HValue::kCanOverflow);

    Add<HStoreKeyed>(elements, adjusted_key, value, nullptr, elements_kind);

    builder.EndBody();
  }
}


// Fills element slots [from, to) with the hole value appropriate for
// |elements_kind|: the hole oddball for smi/object kinds, the hole NaN
// pattern for double kinds.
void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
                                              ElementsKind elements_kind,
                                              HValue* from,
                                              HValue* to) {
  // Fast elements kinds need to be initialized in case statements below cause a
  // garbage collection.

  HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
      ? graph()->GetConstantHole()
      : Add<HConstant>(HConstant::kHoleNaN);

  // Since we're about to store a hole value, the store instruction below must
  // assume an elements kind that supports heap object values.
  if (IsFastSmiOrObjectElementsKind(elements_kind)) {
    elements_kind = FAST_HOLEY_ELEMENTS;
  }

  BuildFillElementsWithValue(elements, elements_kind, from, to, hole);
}


// Copies |length| property slots from |from_properties| to |to_properties|
// (looping backwards), after filling slots [length, capacity) of the target
// with undefined.
void HGraphBuilder::BuildCopyProperties(HValue* from_properties,
                                        HValue* to_properties, HValue* length,
                                        HValue* capacity) {
  ElementsKind kind = FAST_ELEMENTS;

  BuildFillElementsWithValue(to_properties, kind, length, capacity,
                             graph()->GetConstantUndefined());

  LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

  HValue* key = builder.BeginBody(length, graph()->GetConstant0(), Token::GT);

  key = AddUncasted<HSub>(key, graph()->GetConstant1());
  key->ClearFlag(HValue::kCanOverflow);

  HValue* element =
      Add<HLoadKeyed>(from_properties, key, nullptr, nullptr, kind);

  Add<HStoreKeyed>(to_properties, key, element, nullptr, kind);

  builder.EndBody();
}


// Copies elements from one backing store to another, handling elements-kind
// transitions. Small constant capacities are fully unrolled; otherwise a
// backwards loop is emitted. When a GC during the copy could observe
// uninitialized target memory (double -> tagged transition, or a capacity
// larger than the copied length), the target is pre-filled with holes.
void HGraphBuilder::BuildCopyElements(HValue* from_elements,
                                      ElementsKind from_elements_kind,
                                      HValue* to_elements,
                                      ElementsKind to_elements_kind,
                                      HValue* length,
                                      HValue* capacity) {
  int constant_capacity = -1;
  if (capacity != NULL &&
      capacity->IsConstant() &&
      HConstant::cast(capacity)->HasInteger32Value()) {
    int constant_candidate = HConstant::cast(capacity)->Integer32Value();
    if (constant_candidate <= kElementLoopUnrollThreshold) {
      constant_capacity = constant_candidate;
    }
  }

  bool pre_fill_with_holes =
      IsFastDoubleElementsKind(from_elements_kind) &&
      IsFastObjectElementsKind(to_elements_kind);
  if (pre_fill_with_holes) {
    // If the copy might trigger a GC, make sure that the FixedArray is
    // pre-initialized with holes to make sure that it's always in a
    // consistent state.
    BuildFillElementsWithHole(to_elements, to_elements_kind,
                              graph()->GetConstant0(), NULL);
  }

  if (constant_capacity != -1) {
    // Unroll the loop for small elements kinds.
    for (int i = 0; i < constant_capacity; i++) {
      HValue* key_constant = Add<HConstant>(i);
      HInstruction* value = Add<HLoadKeyed>(
          from_elements, key_constant, nullptr, nullptr, from_elements_kind);
      Add<HStoreKeyed>(to_elements, key_constant, value, nullptr,
                       to_elements_kind);
    }
  } else {
    // Only the first |length| slots are copied; hole-fill the tail unless the
    // pre-fill above already covered the whole store or length == capacity.
    if (!pre_fill_with_holes &&
        (capacity == NULL || !length->Equals(capacity))) {
      BuildFillElementsWithHole(to_elements, to_elements_kind,
                                length, NULL);
    }

    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
                                    Token::GT);

    key = AddUncasted<HSub>(key, graph()->GetConstant1());
    key->ClearFlag(HValue::kCanOverflow);

    HValue* element = Add<HLoadKeyed>(from_elements, key, nullptr, nullptr,
                                      from_elements_kind, ALLOW_RETURN_HOLE);

    // A holey source copied into a smi store may deliver holes, so widen the
    // store kind to one that accepts heap objects.
    ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
                         IsFastSmiElementsKind(to_elements_kind))
        ? FAST_HOLEY_ELEMENTS : to_elements_kind;

    if (IsHoleyElementsKind(from_elements_kind) &&
        from_elements_kind != to_elements_kind) {
      // Holes must be translated to the representation of the target kind.
      IfBuilder if_hole(this);
      if_hole.If<HCompareHoleAndBranch>(element);
      if_hole.Then();
      HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
          ?
          Add<HConstant>(HConstant::kHoleNaN)
          : graph()->GetConstantHole();
      Add<HStoreKeyed>(to_elements, key, hole_constant, nullptr, kind);
      if_hole.Else();
      HStoreKeyed* store =
          Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
      store->SetFlag(HValue::kAllowUndefinedAsNaN);
      if_hole.End();
    } else {
      HStoreKeyed* store =
          Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
      store->SetFlag(HValue::kAllowUndefinedAsNaN);
    }

    builder.EndBody();
  }

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->inlined_copied_elements());
}


// Clones a boilerplate array by sharing its elements pointer: only the
// JSArray header is allocated, the elements store is reused as-is.
HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
                                                  HValue* allocation_site,
                                                  AllocationSiteMode mode,
                                                  ElementsKind kind) {
  HAllocate* array = AllocateJSArrayObject(mode);

  HValue* map = AddLoadMap(boilerplate);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* length = AddLoadArrayLength(boilerplate, kind);

  BuildJSArrayHeader(array,
                     map,
                     elements,
                     mode,
                     FAST_ELEMENTS,
                     allocation_site,
                     length);
  return array;
}


// Clones a boilerplate array that has no elements: builds a JSArray header
// with the empty fixed array as elements and length 0.
HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
                                                    HValue* allocation_site,
                                                    AllocationSiteMode mode) {
  HAllocate* array = AllocateJSArrayObject(mode);

  HValue* map = AddLoadMap(boilerplate);

  BuildJSArrayHeader(array,
                     map,
                     NULL,  // set elements to empty fixed array
                     mode,
                     FAST_ELEMENTS,
                     allocation_site,
                     graph()->GetConstant0());
  return array;
}


// Clones a boilerplate array with non-empty elements: allocates a fresh
// backing store, copies the elements header and contents, and installs the
// store on a freshly built (initially empty) clone.
HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
                                                       HValue* allocation_site,
                                                       AllocationSiteMode mode,
                                                       ElementsKind kind) {
  HValue* boilerplate_elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);

  // Generate size calculation code here in order to make it dominate
  // the JSArray allocation.
  HValue* elements_size = BuildCalculateElementsSize(kind, capacity);

  // Create empty JSArray object for now, store elimination should remove
  // redundant initialization of elements and length fields and at the same
  // time the object will be fully prepared for GC if it happens during
  // elements allocation.
  HValue* result = BuildCloneShallowArrayEmpty(
      boilerplate, allocation_site, mode);

  HAllocate* elements = BuildAllocateElements(kind, elements_size);

  // This function implicitly relies on the fact that the
  // FastCloneShallowArrayStub is called only for literals shorter than
  // JSArray::kInitialMaxFastElementArray.
  // Can't add HBoundsCheck here because otherwise the stub will eager a frame.
  HConstant* size_upper_bound = EstablishElementsAllocationSize(
      kind, JSArray::kInitialMaxFastElementArray);
  elements->set_size_upper_bound(size_upper_bound);

  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(), elements);

  // The allocation for the cloned array above causes register pressure on
  // machines with low register counts. Force a reload of the boilerplate
  // elements here to free up a register for the allocation to avoid unnecessary
  // spillage.
  boilerplate_elements = AddLoadElements(boilerplate);
  boilerplate_elements->SetFlag(HValue::kCantBeReplaced);

  // Copy the elements array header.
  for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
    Add<HStoreNamedField>(
        elements, access,
        Add<HLoadNamedField>(boilerplate_elements, nullptr, access));
  }

  // And the result of the length
  HValue* length = AddLoadArrayLength(boilerplate, kind);
  Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind), length);

  BuildCopyElements(boilerplate_elements, kind, elements,
                    kind, length, NULL);
  return result;
}


// Emits a test of |value| against the nil-like cases permitted by |type|
// (null, undefined, undetectable), capturing the outcome in |continuation|.
// When any nil case is excluded by |type|, the else-branch either checks the
// value against the single expected map (deopting on mismatch) or deopts.
void HGraphBuilder::BuildCompareNil(HValue* value, Type* type,
                                    HIfContinuation* continuation,
                                    MapEmbedding map_embedding) {
  IfBuilder if_nil(this);
  bool some_case_handled = false;
  bool some_case_missing = false;

  if (type->Maybe(Type::Null())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (type->Maybe(Type::Undefined())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HCompareObjectEqAndBranch>(value,
                                         graph()->GetConstantUndefined());
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (type->Maybe(Type::Undetectable())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HIsUndetectableAndBranch>(value);
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (some_case_missing) {
    if_nil.Then();
    if_nil.Else();
    if (type->NumClasses() == 1) {
      BuildCheckHeapObject(value);
      // For ICs, the map checked below is a sentinel map that gets replaced by
      // the monomorphic map when the code is used as a template to generate a
      // new IC. For optimized functions, there is no sentinel map, the map
      // emitted below is the actual monomorphic map.
      if (map_embedding == kEmbedMapsViaWeakCells) {
        // Load the map out of a weak cell so the IC template does not keep
        // the map strongly alive.
        HValue* cell =
            Add<HConstant>(Map::WeakCellForMap(type->Classes().Current()));
        HValue* expected_map = Add<HLoadNamedField>(
            cell, nullptr, HObjectAccess::ForWeakCellValue());
        HValue* map =
            Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
        IfBuilder map_check(this);
        map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
        map_check.ThenDeopt(Deoptimizer::kUnknownMap);
        map_check.End();
      } else {
        DCHECK(map_embedding == kEmbedMapsDirectly);
        Add<HCheckMaps>(value, type->Classes().Current());
      }
    } else {
      if_nil.Deopt(Deoptimizer::kTooManyUndetectableTypes);
    }
  }

  if_nil.CaptureContinuation(continuation);
}


// Plants an AllocationMemento directly behind |previous_object| (whose size
// is |previous_object_size|), pointing at |allocation_site|, and — when
// pretenuring is enabled — bumps the site's creation counter.
void HGraphBuilder::BuildCreateAllocationMemento(
    HValue* previous_object,
    HValue* previous_object_size,
    HValue* allocation_site) {
  DCHECK(allocation_site != NULL);
  HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
      previous_object, previous_object_size, HType::HeapObject());
  AddStoreMapConstant(
      allocation_memento, isolate()->factory()->allocation_memento_map());
  Add<HStoreNamedField>(
      allocation_memento,
      HObjectAccess::ForAllocationMementoSite(),
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    HValue* memento_create_count =
        Add<HLoadNamedField>(allocation_site, nullptr,
                             HObjectAccess::ForAllocationSiteOffset(
                                 AllocationSite::kPretenureCreateCountOffset));
    memento_create_count = AddUncasted<HAdd>(
        memento_create_count, graph()->GetConstant1());
    // This smi value is reset to zero after every gc, overflow isn't a problem
    // since the counter is bounded by the new space size.
    memento_create_count->ClearFlag(HValue::kCanOverflow);
    Add<HStoreNamedField>(
        allocation_site, HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kPretenureCreateCountOffset), memento_create_count);
  }
}


// Loads the native context out of the current function context.
HInstruction* HGraphBuilder::BuildGetNativeContext() {
  return Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
}


// Loads the native context reachable from |closure|'s function context.
HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
  // Get the global object, then the native context
  HInstruction* context = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForFunctionContextPointer());
  return Add<HLoadNamedField>(
      context, nullptr,
      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
}


// Loads script context number |context_index| out of the native context's
// script context table.
HInstruction* HGraphBuilder::BuildGetScriptContext(int context_index) {
  HValue* native_context = BuildGetNativeContext();
  HValue* script_context_table = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::SCRIPT_CONTEXT_TABLE_INDEX));
  return Add<HLoadNamedField>(script_context_table, nullptr,
                              HObjectAccess::ForScriptContext(context_index));
}


// Walks up the context chain from the current context. With a dynamic
// |depth| a loop is emitted; with a constant |depth_value| (and NULL
// |depth|) the walk is unrolled.
HValue* HGraphBuilder::BuildGetParentContext(HValue* depth, int depth_value) {
  HValue* script_context = context();
  if (depth != NULL) {
    HValue* zero = graph()->GetConstant0();

    Push(script_context);
    Push(depth);

    LoopBuilder loop(this);
    loop.BeginBody(2);  // Drop script_context and depth from last environment
                        // to appease live range building without simulates.
    depth = Pop();
    script_context = Pop();

    script_context = Add<HLoadNamedField>(
        script_context, nullptr,
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
    depth = AddUncasted<HSub>(depth, graph()->GetConstant1());
    depth->ClearFlag(HValue::kCanOverflow);

    IfBuilder if_break(this);
    if_break.If<HCompareNumericAndBranch, HValue*>(depth, zero, Token::EQ);
    if_break.Then();
    {
      Push(script_context);  // The result.
      loop.Break();
    }
    if_break.Else();
    {
      Push(script_context);
      Push(depth);
    }
    loop.EndBody();
    if_break.End();

    script_context = Pop();
  } else if (depth_value > 0) {
    // Unroll the above loop.
    for (int i = 0; i < depth_value; i++) {
      script_context = Add<HLoadNamedField>(
          script_context, nullptr,
          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
    }
  }
  return script_context;
}


// Loads the Array function from the native context.
HInstruction* HGraphBuilder::BuildGetArrayFunction() {
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* index =
      Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
  return Add<HLoadKeyed>(native_context, index, nullptr, nullptr,
                         FAST_ELEMENTS);
}


// Loads an ArrayBufferView field at |index|, yielding 0 instead of the
// stored value when the view's backing buffer has been neutered (its
// WasNeutered bit is set).
HValue* HGraphBuilder::BuildArrayBufferViewFieldAccessor(HValue* object,
                                                          HValue* checked_object,
                                                          FieldIndex index) {
  NoObservableSideEffectsScope scope(this);
  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
      index.offset(), Representation::Tagged());
  HInstruction* buffer = Add<HLoadNamedField>(
      object, checked_object, HObjectAccess::ForJSArrayBufferViewBuffer());
  HInstruction* field = Add<HLoadNamedField>(object, checked_object, access);

  HInstruction* flags = Add<HLoadNamedField>(
      buffer, nullptr, HObjectAccess::ForJSArrayBufferBitField());
  HValue* was_neutered_mask =
      Add<HConstant>(1 << JSArrayBuffer::WasNeutered::kShift);
  HValue* was_neutered_test =
      AddUncasted<HBitwise>(Token::BIT_AND, flags, was_neutered_mask);

  IfBuilder if_was_neutered(this);
  if_was_neutered.If<HCompareNumericAndBranch>(
      was_neutered_test, graph()->GetConstant0(), Token::NE);
  if_was_neutered.Then();
  Push(graph()->GetConstant0());
  if_was_neutered.Else();
  Push(field);
  if_was_neutered.End();

  return Pop();
}


// JSArrayBuilder with allocation-site tracking: the tracking mode comes
// from the site's elements kind unless explicitly disabled by
// |override_mode|.
HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
                                              ElementsKind kind,
                                              HValue* allocation_site_payload,
                                              HValue* constructor_function,
                                              AllocationSiteOverrideMode override_mode) :
    builder_(builder),
    kind_(kind),
    allocation_site_payload_(allocation_site_payload),
    constructor_function_(constructor_function) {
  DCHECK(!allocation_site_payload->IsConstant() ||
         HConstant::cast(allocation_site_payload)->handle(
             builder_->isolate())->IsAllocationSite());
  mode_ = override_mode == DISABLE_ALLOCATION_SITES
      ? DONT_TRACK_ALLOCATION_SITE
      : AllocationSite::GetMode(kind);
}


// JSArrayBuilder without allocation-site tracking.
HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
                                              ElementsKind kind,
                                              HValue* constructor_function) :
    builder_(builder),
    kind_(kind),
    mode_(DONT_TRACK_ALLOCATION_SITE),
    allocation_site_payload_(NULL),
    constructor_function_(constructor_function) {
}


// Produces the map for the array being built: a constant map when compiling
// an optimized function; otherwise (stub compilation) a load from the
// constructor's initial map or from the native context's array-map slot.
HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
  if (!builder()->top_info()->IsStub()) {
    // A constant map is fine.
    Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
                    builder()->isolate());
    return builder()->Add<HConstant>(map);
  }

  if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
    // No need for a context lookup if the kind_ matches the initial
    // map, because we can just load the map in that case.
    HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
    return builder()->Add<HLoadNamedField>(constructor_function_, nullptr,
                                           access);
  }

  // TODO(mvstanton): we should always have a constructor function if we
  // are creating a stub.
  HInstruction* native_context = constructor_function_ != NULL
      ? builder()->BuildGetNativeContext(constructor_function_)
      : builder()->BuildGetNativeContext();

  HObjectAccess access =
      HObjectAccess::ForContextSlot(Context::ArrayMapIndex(kind_));
  return builder()->Add<HLoadNamedField>(native_context, nullptr, access);
}


// Loads the initial map stored on the constructor function itself.
HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
  // Find the map near the constructor function
  HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
  return builder()->Add<HLoadNamedField>(constructor_function_, nullptr,
                                         access);
}


// Allocates an array with the default initial capacity and length 0.
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
  HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
  return AllocateArray(capacity,
                       capacity,
                       builder()->graph()->GetConstant0());
}


// Overload taking the capacity upper bound as an HConstant; forwards to the
// int overload below.
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    HValue* capacity,
    HConstant* capacity_upper_bound,
    HValue* length_field,
    FillMode fill_mode) {
  return AllocateArray(capacity,
                       capacity_upper_bound->GetInteger32Constant(),
                       length_field,
                       fill_mode);
}


// Allocates the array and records an upper bound on the elements allocation
// size when one is not already set.
// NOTE(review): "elememts" below is a pre-existing typo for "elements" in a
// local name; left untouched since this change is documentation-only.
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    HValue* capacity,
    int capacity_upper_bound,
    HValue* length_field,
    FillMode fill_mode) {
  HConstant* elememts_size_upper_bound = capacity->IsInteger32Constant()
      ? HConstant::cast(capacity)
      : builder()->EstablishElementsAllocationSize(kind_, capacity_upper_bound);

  HAllocate* array = AllocateArray(capacity, length_field, fill_mode);
  if (!elements_location_->has_size_upper_bound()) {
    elements_location_->set_size_upper_bound(elememts_size_upper_bound);
  }
  return array;
}


// Core allocation path: allocates the JSArray object, fills in its header,
// then allocates and initializes the elements store (optionally hole-filled)
// and installs it on the array.
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    HValue* capacity,
    HValue* length_field,
    FillMode fill_mode) {
  // These HForceRepresentations are because we store these as fields in the
  // objects we construct, and an int32-to-smi HChange could deopt. Accept
  // the deopt possibility now, before allocation occurs.
  capacity =
      builder()->AddUncasted<HForceRepresentation>(capacity,
                                                   Representation::Smi());
  length_field =
      builder()->AddUncasted<HForceRepresentation>(length_field,
                                                   Representation::Smi());

  // Generate size calculation code here in order to make it dominate
  // the JSArray allocation.
  HValue* elements_size =
      builder()->BuildCalculateElementsSize(kind_, capacity);

  // Bail out for large objects.
  HValue* max_regular_heap_object_size =
      builder()->Add<HConstant>(Page::kMaxRegularHeapObjectSize);
  builder()->Add<HBoundsCheck>(elements_size, max_regular_heap_object_size);

  // Allocate (dealing with failure appropriately)
  HAllocate* array_object = builder()->AllocateJSArrayObject(mode_);

  // Fill in the fields: map, properties, length
  HValue* map;
  if (allocation_site_payload_ == NULL) {
    map = EmitInternalMapCode();
  } else {
    map = EmitMapCode();
  }

  builder()->BuildJSArrayHeader(array_object,
                                map,
                                NULL,  // set elements to empty fixed array
                                mode_,
                                kind_,
                                allocation_site_payload_,
                                length_field);

  // Allocate and initialize the elements
  elements_location_ = builder()->BuildAllocateElements(kind_, elements_size);

  builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);

  // Set the elements
  builder()->Add<HStoreNamedField>(
      array_object, HObjectAccess::ForElementsPointer(), elements_location_);

  if (fill_mode == FILL_WITH_HOLE) {
    builder()->BuildFillElementsWithHole(elements_location_, kind_,
                                         graph()->GetConstant0(), capacity);
  }

  return array_object;
}


// Loads a builtin function from the native context at |context_index|.
HValue* HGraphBuilder::AddLoadJSBuiltin(int context_index) {
  HValue* native_context = BuildGetNativeContext();
  HObjectAccess function_access = HObjectAccess::ForContextSlot(context_index);
  return Add<HLoadNamedField>(native_context, nullptr, function_access);
}


HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
    : HGraphBuilder(info),
      function_state_(NULL),
      initial_function_state_(this, info, NORMAL_RETURN, 0),
      ast_context_(NULL),
      break_scope_(NULL),
      inlined_count_(0),
      globals_(10, info->zone()),
      osr_(new(info->zone())