      1 // Copyright 2013 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/crankshaft/hydrogen.h"
      6 
      7 #include <sstream>
      8 
      9 #include "src/allocation-site-scopes.h"
     10 #include "src/ast/ast-numbering.h"
     11 #include "src/ast/scopeinfo.h"
     12 #include "src/code-factory.h"
     13 #include "src/crankshaft/hydrogen-bce.h"
     14 #include "src/crankshaft/hydrogen-bch.h"
     15 #include "src/crankshaft/hydrogen-canonicalize.h"
     16 #include "src/crankshaft/hydrogen-check-elimination.h"
     17 #include "src/crankshaft/hydrogen-dce.h"
     18 #include "src/crankshaft/hydrogen-dehoist.h"
     19 #include "src/crankshaft/hydrogen-environment-liveness.h"
     20 #include "src/crankshaft/hydrogen-escape-analysis.h"
     21 #include "src/crankshaft/hydrogen-gvn.h"
     22 #include "src/crankshaft/hydrogen-infer-representation.h"
     23 #include "src/crankshaft/hydrogen-infer-types.h"
     24 #include "src/crankshaft/hydrogen-load-elimination.h"
     25 #include "src/crankshaft/hydrogen-mark-deoptimize.h"
     26 #include "src/crankshaft/hydrogen-mark-unreachable.h"
     27 #include "src/crankshaft/hydrogen-osr.h"
     28 #include "src/crankshaft/hydrogen-range-analysis.h"
     29 #include "src/crankshaft/hydrogen-redundant-phi.h"
     30 #include "src/crankshaft/hydrogen-removable-simulates.h"
     31 #include "src/crankshaft/hydrogen-representation-changes.h"
     32 #include "src/crankshaft/hydrogen-sce.h"
     33 #include "src/crankshaft/hydrogen-store-elimination.h"
     34 #include "src/crankshaft/hydrogen-uint32-analysis.h"
     35 #include "src/crankshaft/lithium-allocator.h"
     36 #include "src/crankshaft/typing.h"
     37 #include "src/full-codegen/full-codegen.h"
     38 #include "src/ic/call-optimization.h"
     39 #include "src/ic/ic.h"
     40 // GetRootConstructor
     41 #include "src/ic/ic-inl.h"
     42 #include "src/isolate-inl.h"
     43 #include "src/parsing/parser.h"
     44 #include "src/runtime/runtime.h"
     45 
     46 #if V8_TARGET_ARCH_IA32
     47 #include "src/crankshaft/ia32/lithium-codegen-ia32.h"  // NOLINT
     48 #elif V8_TARGET_ARCH_X64
     49 #include "src/crankshaft/x64/lithium-codegen-x64.h"  // NOLINT
     50 #elif V8_TARGET_ARCH_ARM64
     51 #include "src/crankshaft/arm64/lithium-codegen-arm64.h"  // NOLINT
     52 #elif V8_TARGET_ARCH_ARM
     53 #include "src/crankshaft/arm/lithium-codegen-arm.h"  // NOLINT
     54 #elif V8_TARGET_ARCH_PPC
     55 #include "src/crankshaft/ppc/lithium-codegen-ppc.h"  // NOLINT
     56 #elif V8_TARGET_ARCH_MIPS
     57 #include "src/crankshaft/mips/lithium-codegen-mips.h"  // NOLINT
     58 #elif V8_TARGET_ARCH_MIPS64
     59 #include "src/crankshaft/mips64/lithium-codegen-mips64.h"  // NOLINT
     60 #elif V8_TARGET_ARCH_X87
     61 #include "src/crankshaft/x87/lithium-codegen-x87.h"  // NOLINT
     62 #else
     63 #error Unsupported target architecture.
     64 #endif
     65 
     66 namespace v8 {
     67 namespace internal {
     68 
     69 HBasicBlock::HBasicBlock(HGraph* graph)
     70     : block_id_(graph->GetNextBlockID()),
     71       graph_(graph),
     72       phis_(4, graph->zone()),
     73       first_(NULL),
     74       last_(NULL),
     75       end_(NULL),
     76       loop_information_(NULL),
     77       predecessors_(2, graph->zone()),
     78       dominator_(NULL),
     79       dominated_blocks_(4, graph->zone()),
     80       last_environment_(NULL),
     81       argument_count_(-1),
     82       first_instruction_index_(-1),
     83       last_instruction_index_(-1),
     84       deleted_phis_(4, graph->zone()),
     85       parent_loop_header_(NULL),
     86       inlined_entry_block_(NULL),
     87       is_inline_return_target_(false),
     88       is_reachable_(true),
     89       dominates_loop_successors_(false),
     90       is_osr_entry_(false),
     91       is_ordered_(false) { }
     92 
     93 
     94 Isolate* HBasicBlock::isolate() const {
     95   return graph_->isolate();
     96 }
     97 
     98 
     99 void HBasicBlock::MarkUnreachable() {
    100   is_reachable_ = false;
    101 }
    102 
    103 
    104 void HBasicBlock::AttachLoopInformation() {
    105   DCHECK(!IsLoopHeader());
    106   loop_information_ = new(zone()) HLoopInformation(this, zone());
    107 }
    108 
    109 
    110 void HBasicBlock::DetachLoopInformation() {
    111   DCHECK(IsLoopHeader());
    112   loop_information_ = NULL;
    113 }
    114 
    115 
    116 void HBasicBlock::AddPhi(HPhi* phi) {
    117   DCHECK(!IsStartBlock());
    118   phis_.Add(phi, zone());
    119   phi->SetBlock(this);
    120 }
    121 
    122 
    123 void HBasicBlock::RemovePhi(HPhi* phi) {
    124   DCHECK(phi->block() == this);
    125   DCHECK(phis_.Contains(phi));
    126   phi->Kill();
    127   phis_.RemoveElement(phi);
    128   phi->SetBlock(NULL);
    129 }
    130 
    131 
    132 void HBasicBlock::AddInstruction(HInstruction* instr, SourcePosition position) {
    133   DCHECK(!IsStartBlock() || !IsFinished());
    134   DCHECK(!instr->IsLinked());
    135   DCHECK(!IsFinished());
    136 
    137   if (!position.IsUnknown()) {
    138     instr->set_position(position);
    139   }
    140   if (first_ == NULL) {
    141     DCHECK(last_environment() != NULL);
    142     DCHECK(!last_environment()->ast_id().IsNone());
    143     HBlockEntry* entry = new(zone()) HBlockEntry();
    144     entry->InitializeAsFirst(this);
    145     if (!position.IsUnknown()) {
    146       entry->set_position(position);
    147     } else {
    148       DCHECK(!FLAG_hydrogen_track_positions ||
    149              !graph()->info()->IsOptimizing() || instr->IsAbnormalExit());
    150     }
    151     first_ = last_ = entry;
    152   }
    153   instr->InsertAfter(last_);
    154 }
    155 
    156 
    157 HPhi* HBasicBlock::AddNewPhi(int merged_index) {
    158   if (graph()->IsInsideNoSideEffectsScope()) {
    159     merged_index = HPhi::kInvalidMergedIndex;
    160   }
    161   HPhi* phi = new(zone()) HPhi(merged_index, zone());
    162   AddPhi(phi);
    163   return phi;
    164 }
    165 
    166 
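        // A simulate captures the changes made to the environment (values pushed on
        // the expression stack and variables assigned) since the previous simulate,
        // so the deoptimizer can reconstruct the unoptimized frame at |ast_id|.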
    167 HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
    168                                        RemovableSimulate removable) {
    169   DCHECK(HasEnvironment());
    170   HEnvironment* environment = last_environment();
    171   DCHECK(ast_id.IsNone() ||
    172          ast_id == BailoutId::StubEntry() ||
    173          environment->closure()->shared()->VerifyBailoutId(ast_id));
    174 
    175   int push_count = environment->push_count();
    176   int pop_count = environment->pop_count();
    177 
    178   HSimulate* instr =
    179       new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
    180 #ifdef DEBUG
    181   instr->set_closure(environment->closure());
    182 #endif
    183   // Order of pushed values: newest (top of stack) first. This allows
    184   // HSimulate::MergeWith() to easily append additional pushed values
    185   // that are older (from further down the stack).
    186   for (int i = 0; i < push_count; ++i) {
    187     instr->AddPushedValue(environment->ExpressionStackAt(i));
    188   }
    189   for (GrowableBitVector::Iterator it(environment->assigned_variables(),
    190                                       zone());
    191        !it.Done();
    192        it.Advance()) {
    193     int index = it.Current();
    194     instr->AddAssignedValue(index, environment->Lookup(index));
    195   }
    196   environment->ClearHistory();
    197   return instr;
    198 }
    199 
    200 
    201 void HBasicBlock::Finish(HControlInstruction* end, SourcePosition position) {
    202   DCHECK(!IsFinished());
    203   AddInstruction(end, position);
    204   end_ = end;
    205   for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    206     it.Current()->RegisterPredecessor(this);
    207   }
    208 }
    209 
    210 
    211 void HBasicBlock::Goto(HBasicBlock* block, SourcePosition position,
    212                        FunctionState* state, bool add_simulate) {
    213   bool drop_extra = state != NULL &&
    214       state->inlining_kind() == NORMAL_RETURN;
    215 
    216   if (block->IsInlineReturnTarget()) {
    217     HEnvironment* env = last_environment();
    218     int argument_count = env->arguments_environment()->parameter_count();
    219     AddInstruction(new(zone())
    220                    HLeaveInlined(state->entry(), argument_count),
    221                    position);
    222     UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
    223   }
    224 
    225   if (add_simulate) AddNewSimulate(BailoutId::None(), position);
    226   HGoto* instr = new(zone()) HGoto(block);
    227   Finish(instr, position);
    228 }
    229 
    230 
    231 void HBasicBlock::AddLeaveInlined(HValue* return_value, FunctionState* state,
    232                                   SourcePosition position) {
    233   HBasicBlock* target = state->function_return();
    234   bool drop_extra = state->inlining_kind() == NORMAL_RETURN;
    235 
    236   DCHECK(target->IsInlineReturnTarget());
    237   DCHECK(return_value != NULL);
    238   HEnvironment* env = last_environment();
    239   int argument_count = env->arguments_environment()->parameter_count();
    240   AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
    241                  position);
    242   UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
    243   last_environment()->Push(return_value);
    244   AddNewSimulate(BailoutId::None(), position);
    245   HGoto* instr = new(zone()) HGoto(target);
    246   Finish(instr, position);
    247 }
    248 
    249 
    250 void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
    251   DCHECK(!HasEnvironment());
    252   DCHECK(first() == NULL);
    253   UpdateEnvironment(env);
    254 }
    255 
    256 
    257 void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
    258   last_environment_ = env;
    259   graph()->update_maximum_environment_size(env->first_expression_index());
    260 }
    261 
    262 
    263 void HBasicBlock::SetJoinId(BailoutId ast_id) {
    264   int length = predecessors_.length();
    265   DCHECK(length > 0);
    266   for (int i = 0; i < length; i++) {
    267     HBasicBlock* predecessor = predecessors_[i];
    268     DCHECK(predecessor->end()->IsGoto());
    269     HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
    270     DCHECK(i != 0 ||
    271            (predecessor->last_environment()->closure().is_null() ||
    272             predecessor->last_environment()->closure()->shared()
    273               ->VerifyBailoutId(ast_id)));
    274     simulate->set_ast_id(ast_id);
    275     predecessor->last_environment()->set_ast_id(ast_id);
    276   }
    277 }
    278 
    279 
    280 bool HBasicBlock::Dominates(HBasicBlock* other) const {
    281   HBasicBlock* current = other->dominator();
    282   while (current != NULL) {
    283     if (current == this) return true;
    284     current = current->dominator();
    285   }
    286   return false;
    287 }
    288 
    289 
    290 bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
    291   if (this == other) return true;
    292   return Dominates(other);
    293 }
    294 
    295 
    296 int HBasicBlock::LoopNestingDepth() const {
    297   const HBasicBlock* current = this;
    298   int result = (current->IsLoopHeader()) ? 1 : 0;
    299   while (current->parent_loop_header() != NULL) {
    300     current = current->parent_loop_header();
    301     result++;
    302   }
    303   return result;
    304 }
    305 
    306 
    307 void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
    308   DCHECK(IsLoopHeader());
    309 
    310   SetJoinId(stmt->EntryId());
    311   if (predecessors()->length() == 1) {
    312     // This is a degenerate loop.
    313     DetachLoopInformation();
    314     return;
    315   }
    316 
    317   // Only the first entry into the loop is from outside the loop. All other
    318   // entries must be back edges.
    319   for (int i = 1; i < predecessors()->length(); ++i) {
    320     loop_information()->RegisterBackEdge(predecessors()->at(i));
    321   }
    322 }
    323 
    324 
    325 void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
    326   DCHECK(IsFinished());
    327   HBasicBlock* succ_block = end()->SuccessorAt(succ);
    328 
    329   DCHECK(succ_block->predecessors()->length() == 1);
    330   succ_block->MarkUnreachable();
    331 }
    332 
    333 
    334 void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
    335   if (HasPredecessor()) {
    336     // Only loop header blocks can have a predecessor added after
    337     // instructions have been added to the block (they have phis for all
    338     // values in the environment; these phis may be eliminated later).
    339     DCHECK(IsLoopHeader() || first_ == NULL);
    340     HEnvironment* incoming_env = pred->last_environment();
    341     if (IsLoopHeader()) {
    342       DCHECK_EQ(phis()->length(), incoming_env->length());
    343       for (int i = 0; i < phis_.length(); ++i) {
    344         phis_[i]->AddInput(incoming_env->values()->at(i));
    345       }
    346     } else {
    347       last_environment()->AddIncomingEdge(this, pred->last_environment());
    348     }
    349   } else if (!HasEnvironment() && !IsFinished()) {
    350     DCHECK(!IsLoopHeader());
    351     SetInitialEnvironment(pred->last_environment()->Copy());
    352   }
    353 
    354   predecessors_.Add(pred, zone());
    355 }
    356 
    357 
    358 void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
    359   DCHECK(!dominated_blocks_.Contains(block));
    360   // Keep the list of dominated blocks sorted such that if one block in this
    361   // list succeeds another, the predecessor comes before the successor.
    362   int index = 0;
    363   while (index < dominated_blocks_.length() &&
    364          dominated_blocks_[index]->block_id() < block->block_id()) {
    365     ++index;
    366   }
    367   dominated_blocks_.InsertAt(index, block, zone());
    368 }
    369 
    370 
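        // The usual "walk up and intersect" step of iterative dominator construction:
        // ascend the two dominator chains (block ids decrease towards the entry) until
        // they meet; the meeting point becomes this block's immediate dominator.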
    371 void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
    372   if (dominator_ == NULL) {
    373     dominator_ = other;
    374     other->AddDominatedBlock(this);
    375   } else if (other->dominator() != NULL) {
    376     HBasicBlock* first = dominator_;
    377     HBasicBlock* second = other;
    378 
    379     while (first != second) {
    380       if (first->block_id() > second->block_id()) {
    381         first = first->dominator();
    382       } else {
    383         second = second->dominator();
    384       }
    385       DCHECK(first != NULL && second != NULL);
    386     }
    387 
    388     if (dominator_ != first) {
    389       DCHECK(dominator_->dominated_blocks_.Contains(this));
    390       dominator_->dominated_blocks_.RemoveElement(this);
    391       dominator_ = first;
    392       first->AddDominatedBlock(this);
    393     }
    394   }
    395 }
    396 
    397 
    398 void HBasicBlock::AssignLoopSuccessorDominators() {
    399   // Mark blocks that dominate all subsequent reachable blocks inside their
    400   // loop. Exploit the fact that blocks are sorted in reverse post order. When
    401   // the loop is visited in increasing block id order, if the number of
    402   // non-loop-exiting successor edges at the dominator_candidate block doesn't
    403   // exceed the number of previously encountered predecessor edges, there is no
    404   // path from the loop header to any block with higher id that doesn't go
    405   // through the dominator_candidate block. In this case, the
    406   // dominator_candidate block is guaranteed to dominate all blocks reachable
    407   // from it with higher ids.
    408   HBasicBlock* last = loop_information()->GetLastBackEdge();
    409   int outstanding_successors = 1;  // one edge from the pre-header
    410   // Header always dominates everything.
    411   MarkAsLoopSuccessorDominator();
    412   for (int j = block_id(); j <= last->block_id(); ++j) {
    413     HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
    414     for (HPredecessorIterator it(dominator_candidate); !it.Done();
    415          it.Advance()) {
    416       HBasicBlock* predecessor = it.Current();
    417       // Don't count back edges.
    418       if (predecessor->block_id() < dominator_candidate->block_id()) {
    419         outstanding_successors--;
    420       }
    421     }
    422 
    423     // If more successors than predecessors have been seen in the loop up to
    424     // now, it's not possible to guarantee that the current block dominates
    425     // all of the blocks with higher IDs. In this case, assume conservatively
    426     // that those paths through the loop that don't go through the current block
    427     // contain all of the loop's dependencies. Also be careful to record
    428     // dominator information about the current loop that's being processed,
    429     // and not nested loops, which will be processed when
    430     // AssignLoopSuccessorDominators gets called on their header.
    431     DCHECK(outstanding_successors >= 0);
    432     HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
    433     if (outstanding_successors == 0 &&
    434         (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
    435       dominator_candidate->MarkAsLoopSuccessorDominator();
    436     }
    437     HControlInstruction* end = dominator_candidate->end();
    438     for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    439       HBasicBlock* successor = it.Current();
    440       // Only count successors that remain inside the loop and don't loop back
    441       // to a loop header.
    442       if (successor->block_id() > dominator_candidate->block_id() &&
    443           successor->block_id() <= last->block_id()) {
    444         // Backwards edges must land on loop headers.
    445         DCHECK(successor->block_id() > dominator_candidate->block_id() ||
    446                successor->IsLoopHeader());
    447         outstanding_successors++;
    448       }
    449     }
    450   }
    451 }
    452 
    453 
    454 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
    455   for (int i = 0; i < predecessors_.length(); ++i) {
    456     if (predecessors_[i] == predecessor) return i;
    457   }
    458   UNREACHABLE();
    459   return -1;
    460 }
    461 
    462 
    463 #ifdef DEBUG
    464 void HBasicBlock::Verify() {
    465   // Check that every block is finished.
    466   DCHECK(IsFinished());
    467   DCHECK(block_id() >= 0);
    468 
    469   // Check that the incoming edges are in edge split form.
    470   if (predecessors_.length() > 1) {
    471     for (int i = 0; i < predecessors_.length(); ++i) {
    472       DCHECK(predecessors_[i]->end()->SecondSuccessor() == NULL);
    473     }
    474   }
    475 }
    476 #endif
    477 
    478 
    479 void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
    480   this->back_edges_.Add(block, block->zone());
    481   AddBlock(block);
    482 }
    483 
    484 
    485 HBasicBlock* HLoopInformation::GetLastBackEdge() const {
    486   int max_id = -1;
    487   HBasicBlock* result = NULL;
    488   for (int i = 0; i < back_edges_.length(); ++i) {
    489     HBasicBlock* cur = back_edges_[i];
    490     if (cur->block_id() > max_id) {
    491       max_id = cur->block_id();
    492       result = cur;
    493     }
    494   }
    495   return result;
    496 }
    497 
    498 
    499 void HLoopInformation::AddBlock(HBasicBlock* block) {
    500   if (block == loop_header()) return;
    501   if (block->parent_loop_header() == loop_header()) return;
    502   if (block->parent_loop_header() != NULL) {
    503     AddBlock(block->parent_loop_header());
    504   } else {
    505     block->set_parent_loop_header(loop_header());
    506     blocks_.Add(block, block->zone());
    507     for (int i = 0; i < block->predecessors()->length(); ++i) {
    508       AddBlock(block->predecessors()->at(i));
    509     }
    510   }
    511 }
    512 
    513 
    514 #ifdef DEBUG
    515 
    516 // Checks reachability of the blocks in this graph and stores a bit in
    517 // the BitVector "reachable()" for every block that can be reached
    518 // from the start block of the graph. If "dont_visit" is non-null, the given
    519 // block is treated as if it were not part of the graph. "visited_count()"
    520 // returns the number of reachable blocks.
    521 class ReachabilityAnalyzer BASE_EMBEDDED {
    522  public:
    523   ReachabilityAnalyzer(HBasicBlock* entry_block,
    524                        int block_count,
    525                        HBasicBlock* dont_visit)
    526       : visited_count_(0),
    527         stack_(16, entry_block->zone()),
    528         reachable_(block_count, entry_block->zone()),
    529         dont_visit_(dont_visit) {
    530     PushBlock(entry_block);
    531     Analyze();
    532   }
    533 
    534   int visited_count() const { return visited_count_; }
    535   const BitVector* reachable() const { return &reachable_; }
    536 
    537  private:
    538   void PushBlock(HBasicBlock* block) {
    539     if (block != NULL && block != dont_visit_ &&
    540         !reachable_.Contains(block->block_id())) {
    541       reachable_.Add(block->block_id());
    542       stack_.Add(block, block->zone());
    543       visited_count_++;
    544     }
    545   }
    546 
    547   void Analyze() {
    548     while (!stack_.is_empty()) {
    549       HControlInstruction* end = stack_.RemoveLast()->end();
    550       for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    551         PushBlock(it.Current());
    552       }
    553     }
    554   }
    555 
    556   int visited_count_;
    557   ZoneList<HBasicBlock*> stack_;
    558   BitVector reachable_;
    559   HBasicBlock* dont_visit_;
    560 };
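        // Example use (see HGraph::Verify below): excluding a block's dominator from
        // the traversal must make the block itself unreachable:
        //
        //   ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), dominator);
        //   DCHECK(!analyzer.reachable()->Contains(block->block_id()));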
    561 
    562 
    563 void HGraph::Verify(bool do_full_verify) const {
    564   Heap::RelocationLock relocation_lock(isolate()->heap());
    565   AllowHandleDereference allow_deref;
    566   AllowDeferredHandleDereference allow_deferred_deref;
    567   for (int i = 0; i < blocks_.length(); i++) {
    568     HBasicBlock* block = blocks_.at(i);
    569 
    570     block->Verify();
    571 
    572     // Check that every block contains at least one node and that only the last
    573     // node is a control instruction.
    574     HInstruction* current = block->first();
    575     DCHECK(current != NULL && current->IsBlockEntry());
    576     while (current != NULL) {
    577       DCHECK((current->next() == NULL) == current->IsControlInstruction());
    578       DCHECK(current->block() == block);
    579       current->Verify();
    580       current = current->next();
    581     }
    582 
    583     // Check that successors are correctly set.
    584     HBasicBlock* first = block->end()->FirstSuccessor();
    585     HBasicBlock* second = block->end()->SecondSuccessor();
    586     DCHECK(second == NULL || first != NULL);
    587 
    588     // Check that the predecessor array is correct.
    589     if (first != NULL) {
    590       DCHECK(first->predecessors()->Contains(block));
    591       if (second != NULL) {
    592         DCHECK(second->predecessors()->Contains(block));
    593       }
    594     }
    595 
    596     // Check that phis have correct arguments.
    597     for (int j = 0; j < block->phis()->length(); j++) {
    598       HPhi* phi = block->phis()->at(j);
    599       phi->Verify();
    600     }
    601 
    602     // Check that all join blocks have predecessors that end with an
    603     // unconditional goto and agree on their environment node id.
    604     if (block->predecessors()->length() >= 2) {
    605       BailoutId id =
    606           block->predecessors()->first()->last_environment()->ast_id();
    607       for (int k = 0; k < block->predecessors()->length(); k++) {
    608         HBasicBlock* predecessor = block->predecessors()->at(k);
    609         DCHECK(predecessor->end()->IsGoto() ||
    610                predecessor->end()->IsDeoptimize());
    611         DCHECK(predecessor->last_environment()->ast_id() == id);
    612       }
    613     }
    614   }
    615 
    616   // Check special property of first block to have no predecessors.
    617   // Check the special property of the first block: it has no predecessors.
    618 
    619   if (do_full_verify) {
    620     // Check that the graph is fully connected.
    621     ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
    622     DCHECK(analyzer.visited_count() == blocks_.length());
    623 
    624     // Check that entry block dominator is NULL.
    625     DCHECK(entry_block_->dominator() == NULL);
    626 
    627     // Check dominators.
    628     for (int i = 0; i < blocks_.length(); ++i) {
    629       HBasicBlock* block = blocks_.at(i);
    630       if (block->dominator() == NULL) {
    631         // Only the start block may have no dominator assigned.
    632         DCHECK(i == 0);
    633       } else {
    634         // Assert that the block is unreachable when its dominator is excluded.
    635         ReachabilityAnalyzer dominator_analyzer(entry_block_,
    636                                                 blocks_.length(),
    637                                                 block->dominator());
    638         DCHECK(!dominator_analyzer.reachable()->Contains(block->block_id()));
    639       }
    640     }
    641   }
    642 }
    643 
    644 #endif
    645 
    646 
    647 HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
    648                                int32_t value) {
    649   if (!pointer->is_set()) {
    650     // Can't pass GetInvalidContext() to HConstant::New, because that will
    651     // recursively call GetConstant
    652     HConstant* constant = HConstant::New(isolate(), zone(), NULL, value);
    653     constant->InsertAfter(entry_block()->first());
    654     pointer->set(constant);
    655     return constant;
    656   }
    657   return ReinsertConstantIfNecessary(pointer->get());
    658 }
    659 
    660 
    661 HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
    662   if (!constant->IsLinked()) {
    663     // The constant was removed from the graph. Reinsert.
    664     constant->ClearFlag(HValue::kIsDead);
    665     constant->InsertAfter(entry_block()->first());
    666   }
    667   return constant;
    668 }
    669 
    670 
    671 HConstant* HGraph::GetConstant0() {
    672   return GetConstant(&constant_0_, 0);
    673 }
    674 
    675 
    676 HConstant* HGraph::GetConstant1() {
    677   return GetConstant(&constant_1_, 1);
    678 }
    679 
    680 
    681 HConstant* HGraph::GetConstantMinus1() {
    682   return GetConstant(&constant_minus1_, -1);
    683 }
    684 
    685 
    686 HConstant* HGraph::GetConstantBool(bool value) {
    687   return value ? GetConstantTrue() : GetConstantFalse();
    688 }
    689 
    690 
    691 #define DEFINE_GET_CONSTANT(Name, name, type, htype, boolean_value)            \
    692 HConstant* HGraph::GetConstant##Name() {                                       \
    693   if (!constant_##name##_.is_set()) {                                          \
    694     HConstant* constant = new(zone()) HConstant(                               \
    695         Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \
    696         Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()),      \
    697         false,                                                                 \
    698         Representation::Tagged(),                                              \
    699         htype,                                                                 \
    700         true,                                                                  \
    701         boolean_value,                                                         \
    702         false,                                                                 \
    703         ODDBALL_TYPE);                                                         \
    704     constant->InsertAfter(entry_block()->first());                             \
    705     constant_##name##_.set(constant);                                          \
    706   }                                                                            \
    707   return ReinsertConstantIfNecessary(constant_##name##_.get());                \
    708 }
    709 
    710 
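        // Each instantiation below defines HGraph::GetConstant<Name>(), which lazily
        // creates an immovable, tagged HConstant for the corresponding oddball value
        // (e.g. isolate()->factory()->undefined_value()), inserts it right after the
        // entry block's first instruction, caches it in constant_<name>_, and
        // reinserts it if a later pass removed it from the graph.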
    711 DEFINE_GET_CONSTANT(Undefined, undefined, undefined, HType::Undefined(), false)
    712 DEFINE_GET_CONSTANT(True, true, boolean, HType::Boolean(), true)
    713 DEFINE_GET_CONSTANT(False, false, boolean, HType::Boolean(), false)
    714 DEFINE_GET_CONSTANT(Hole, the_hole, the_hole, HType::None(), false)
    715 DEFINE_GET_CONSTANT(Null, null, null, HType::Null(), false)
    716 
    717 
    718 #undef DEFINE_GET_CONSTANT
    719 
    720 #define DEFINE_IS_CONSTANT(Name, name)                                         \
    721 bool HGraph::IsConstant##Name(HConstant* constant) {                           \
    722   return constant_##name##_.is_set() && constant == constant_##name##_.get();  \
    723 }
    724 DEFINE_IS_CONSTANT(Undefined, undefined)
    725 DEFINE_IS_CONSTANT(0, 0)
    726 DEFINE_IS_CONSTANT(1, 1)
    727 DEFINE_IS_CONSTANT(Minus1, minus1)
    728 DEFINE_IS_CONSTANT(True, true)
    729 DEFINE_IS_CONSTANT(False, false)
    730 DEFINE_IS_CONSTANT(Hole, the_hole)
    731 DEFINE_IS_CONSTANT(Null, null)
    732 
    733 #undef DEFINE_IS_CONSTANT
    734 
    735 
    736 HConstant* HGraph::GetInvalidContext() {
    737   return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
    738 }
    739 
    740 
    741 bool HGraph::IsStandardConstant(HConstant* constant) {
    742   if (IsConstantUndefined(constant)) return true;
    743   if (IsConstant0(constant)) return true;
    744   if (IsConstant1(constant)) return true;
    745   if (IsConstantMinus1(constant)) return true;
    746   if (IsConstantTrue(constant)) return true;
    747   if (IsConstantFalse(constant)) return true;
    748   if (IsConstantHole(constant)) return true;
    749   if (IsConstantNull(constant)) return true;
    750   return false;
    751 }
    752 
    753 
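        // Typical use of IfBuilder elsewhere in this file (sketch):
        //
        //   IfBuilder cow_checker(this);
        //   cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
        //   cow_checker.Then();
        //   ... code for the true branch ...
        //   cow_checker.Else();
        //   ... code for the false branch ...
        //   cow_checker.End();
        //
        // Conditions can be combined with Or()/And() between compares, and instead of
        // merging at End() the branches can be handed off to an HIfContinuation via
        // CaptureContinuation() or JoinContinuation().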
    754 HGraphBuilder::IfBuilder::IfBuilder() : builder_(NULL), needs_compare_(true) {}
    755 
    756 
    757 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
    758     : needs_compare_(true) {
    759   Initialize(builder);
    760 }
    761 
    762 
    763 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder,
    764                                     HIfContinuation* continuation)
    765     : needs_compare_(false), first_true_block_(NULL), first_false_block_(NULL) {
    766   InitializeDontCreateBlocks(builder);
    767   continuation->Continue(&first_true_block_, &first_false_block_);
    768 }
    769 
    770 
    771 void HGraphBuilder::IfBuilder::InitializeDontCreateBlocks(
    772     HGraphBuilder* builder) {
    773   builder_ = builder;
    774   finished_ = false;
    775   did_then_ = false;
    776   did_else_ = false;
    777   did_else_if_ = false;
    778   did_and_ = false;
    779   did_or_ = false;
    780   captured_ = false;
    781   pending_merge_block_ = false;
    782   split_edge_merge_block_ = NULL;
    783   merge_at_join_blocks_ = NULL;
    784   normal_merge_at_join_block_count_ = 0;
    785   deopt_merge_at_join_block_count_ = 0;
    786 }
    787 
    788 
    789 void HGraphBuilder::IfBuilder::Initialize(HGraphBuilder* builder) {
    790   InitializeDontCreateBlocks(builder);
    791   HEnvironment* env = builder->environment();
    792   first_true_block_ = builder->CreateBasicBlock(env->Copy());
    793   first_false_block_ = builder->CreateBasicBlock(env->Copy());
    794 }
    795 
    796 
    797 HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
    798     HControlInstruction* compare) {
    799   DCHECK(did_then_ == did_else_);
    800   if (did_else_) {
    801     // Handle if-then-elseif
    802     did_else_if_ = true;
    803     did_else_ = false;
    804     did_then_ = false;
    805     did_and_ = false;
    806     did_or_ = false;
    807     pending_merge_block_ = false;
    808     split_edge_merge_block_ = NULL;
    809     HEnvironment* env = builder()->environment();
    810     first_true_block_ = builder()->CreateBasicBlock(env->Copy());
    811     first_false_block_ = builder()->CreateBasicBlock(env->Copy());
    812   }
    813   if (split_edge_merge_block_ != NULL) {
    814     HEnvironment* env = first_false_block_->last_environment();
    815     HBasicBlock* split_edge = builder()->CreateBasicBlock(env->Copy());
    816     if (did_or_) {
    817       compare->SetSuccessorAt(0, split_edge);
    818       compare->SetSuccessorAt(1, first_false_block_);
    819     } else {
    820       compare->SetSuccessorAt(0, first_true_block_);
    821       compare->SetSuccessorAt(1, split_edge);
    822     }
    823     builder()->GotoNoSimulate(split_edge, split_edge_merge_block_);
    824   } else {
    825     compare->SetSuccessorAt(0, first_true_block_);
    826     compare->SetSuccessorAt(1, first_false_block_);
    827   }
    828   builder()->FinishCurrentBlock(compare);
    829   needs_compare_ = false;
    830   return compare;
    831 }
    832 
    833 
    834 void HGraphBuilder::IfBuilder::Or() {
    835   DCHECK(!needs_compare_);
    836   DCHECK(!did_and_);
    837   did_or_ = true;
    838   HEnvironment* env = first_false_block_->last_environment();
    839   if (split_edge_merge_block_ == NULL) {
    840     split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
    841     builder()->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
    842     first_true_block_ = split_edge_merge_block_;
    843   }
    844   builder()->set_current_block(first_false_block_);
    845   first_false_block_ = builder()->CreateBasicBlock(env->Copy());
    846 }
    847 
    848 
    849 void HGraphBuilder::IfBuilder::And() {
    850   DCHECK(!needs_compare_);
    851   DCHECK(!did_or_);
    852   did_and_ = true;
    853   HEnvironment* env = first_false_block_->last_environment();
    854   if (split_edge_merge_block_ == NULL) {
    855     split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
    856     builder()->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
    857     first_false_block_ = split_edge_merge_block_;
    858   }
    859   builder()->set_current_block(first_true_block_);
    860   first_true_block_ = builder()->CreateBasicBlock(env->Copy());
    861 }
    862 
    863 
    864 void HGraphBuilder::IfBuilder::CaptureContinuation(
    865     HIfContinuation* continuation) {
    866   DCHECK(!did_else_if_);
    867   DCHECK(!finished_);
    868   DCHECK(!captured_);
    869 
    870   HBasicBlock* true_block = NULL;
    871   HBasicBlock* false_block = NULL;
    872   Finish(&true_block, &false_block);
    873   DCHECK(true_block != NULL);
    874   DCHECK(false_block != NULL);
    875   continuation->Capture(true_block, false_block);
    876   captured_ = true;
    877   builder()->set_current_block(NULL);
    878   End();
    879 }
    880 
    881 
    882 void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
    883   DCHECK(!did_else_if_);
    884   DCHECK(!finished_);
    885   DCHECK(!captured_);
    886   HBasicBlock* true_block = NULL;
    887   HBasicBlock* false_block = NULL;
    888   Finish(&true_block, &false_block);
    889   merge_at_join_blocks_ = NULL;
    890   if (true_block != NULL && !true_block->IsFinished()) {
    891     DCHECK(continuation->IsTrueReachable());
    892     builder()->GotoNoSimulate(true_block, continuation->true_branch());
    893   }
    894   if (false_block != NULL && !false_block->IsFinished()) {
    895     DCHECK(continuation->IsFalseReachable());
    896     builder()->GotoNoSimulate(false_block, continuation->false_branch());
    897   }
    898   captured_ = true;
    899   End();
    900 }
    901 
    902 
    903 void HGraphBuilder::IfBuilder::Then() {
    904   DCHECK(!captured_);
    905   DCHECK(!finished_);
    906   did_then_ = true;
    907   if (needs_compare_) {
    908     // Handle ifs without any expressions: they jump directly to the "else"
    909     // branch. However, we must pretend that the "then" branch is reachable,
    910     // so that the graph builder visits it and sees any live range extending
    911     // constructs within it.
    912     HConstant* constant_false = builder()->graph()->GetConstantFalse();
    913     ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
    914     boolean_type.Add(ToBooleanStub::BOOLEAN);
    915     HBranch* branch = builder()->New<HBranch>(
    916         constant_false, boolean_type, first_true_block_, first_false_block_);
    917     builder()->FinishCurrentBlock(branch);
    918   }
    919   builder()->set_current_block(first_true_block_);
    920   pending_merge_block_ = true;
    921 }
    922 
    923 
    924 void HGraphBuilder::IfBuilder::Else() {
    925   DCHECK(did_then_);
    926   DCHECK(!captured_);
    927   DCHECK(!finished_);
    928   AddMergeAtJoinBlock(false);
    929   builder()->set_current_block(first_false_block_);
    930   pending_merge_block_ = true;
    931   did_else_ = true;
    932 }
    933 
    934 
    935 void HGraphBuilder::IfBuilder::Deopt(Deoptimizer::DeoptReason reason) {
    936   DCHECK(did_then_);
    937   builder()->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
    938   AddMergeAtJoinBlock(true);
    939 }
    940 
    941 
    942 void HGraphBuilder::IfBuilder::Return(HValue* value) {
    943   HValue* parameter_count = builder()->graph()->GetConstantMinus1();
    944   builder()->FinishExitCurrentBlock(
    945       builder()->New<HReturn>(value, parameter_count));
    946   AddMergeAtJoinBlock(false);
    947 }
    948 
    949 
    950 void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
    951   if (!pending_merge_block_) return;
    952   HBasicBlock* block = builder()->current_block();
    953   DCHECK(block == NULL || !block->IsFinished());
    954   MergeAtJoinBlock* record = new (builder()->zone())
    955       MergeAtJoinBlock(block, deopt, merge_at_join_blocks_);
    956   merge_at_join_blocks_ = record;
    957   if (block != NULL) {
    958     DCHECK(block->end() == NULL);
    959     if (deopt) {
    960       normal_merge_at_join_block_count_++;
    961     } else {
    962       deopt_merge_at_join_block_count_++;
    963     }
    964   }
    965   builder()->set_current_block(NULL);
    966   pending_merge_block_ = false;
    967 }
    968 
    969 
    970 void HGraphBuilder::IfBuilder::Finish() {
    971   DCHECK(!finished_);
    972   if (!did_then_) {
    973     Then();
    974   }
    975   AddMergeAtJoinBlock(false);
    976   if (!did_else_) {
    977     Else();
    978     AddMergeAtJoinBlock(false);
    979   }
    980   finished_ = true;
    981 }
    982 
    983 
    984 void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
    985                                       HBasicBlock** else_continuation) {
    986   Finish();
    987 
    988   MergeAtJoinBlock* else_record = merge_at_join_blocks_;
    989   if (else_continuation != NULL) {
    990     *else_continuation = else_record->block_;
    991   }
    992   MergeAtJoinBlock* then_record = else_record->next_;
    993   if (then_continuation != NULL) {
    994     *then_continuation = then_record->block_;
    995   }
    996   DCHECK(then_record->next_ == NULL);
    997 }
    998 
    999 
   1000 void HGraphBuilder::IfBuilder::EndUnreachable() {
   1001   if (captured_) return;
   1002   Finish();
   1003   builder()->set_current_block(nullptr);
   1004 }
   1005 
   1006 
   1007 void HGraphBuilder::IfBuilder::End() {
   1008   if (captured_) return;
   1009   Finish();
   1010 
   1011   int total_merged_blocks = normal_merge_at_join_block_count_ +
   1012     deopt_merge_at_join_block_count_;
   1013   DCHECK(total_merged_blocks >= 1);
   1014   HBasicBlock* merge_block =
   1015       total_merged_blocks == 1 ? NULL : builder()->graph()->CreateBasicBlock();
   1016 
   1017   // Merge non-deopt blocks first to ensure the environment has the right
   1018   // size for padding.
   1019   MergeAtJoinBlock* current = merge_at_join_blocks_;
   1020   while (current != NULL) {
   1021     if (!current->deopt_ && current->block_ != NULL) {
   1022       // If there is only one block that makes it through to the end of the
   1023       // if, then just set it as the current block and continue rather than
   1024       // creating an unnecessary merge block.
   1025       if (total_merged_blocks == 1) {
   1026         builder()->set_current_block(current->block_);
   1027         return;
   1028       }
   1029       builder()->GotoNoSimulate(current->block_, merge_block);
   1030     }
   1031     current = current->next_;
   1032   }
   1033 
   1034   // Merge deopt blocks, padding when necessary.
   1035   current = merge_at_join_blocks_;
   1036   while (current != NULL) {
   1037     if (current->deopt_ && current->block_ != NULL) {
   1038       current->block_->FinishExit(
   1039           HAbnormalExit::New(builder()->isolate(), builder()->zone(), NULL),
   1040           SourcePosition::Unknown());
   1041     }
   1042     current = current->next_;
   1043   }
   1044   builder()->set_current_block(merge_block);
   1045 }
   1046 
   1047 
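        // Typical use of LoopBuilder (sketch): iterate |index| from |start| while
        // |index| < |limit|, incrementing after each iteration; |context| is the
        // current context value.
        //
        //   LoopBuilder loop(this, context, LoopBuilder::kPostIncrement);
        //   HValue* index = loop.BeginBody(start, limit, Token::LT);
        //   ... loop body, which may call loop.Break() ...
        //   loop.EndBody();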
   1048 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder) {
   1049   Initialize(builder, NULL, kWhileTrue, NULL);
   1050 }
   1051 
   1052 
   1053 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
   1054                                         LoopBuilder::Direction direction) {
   1055   Initialize(builder, context, direction, builder->graph()->GetConstant1());
   1056 }
   1057 
   1058 
   1059 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
   1060                                         LoopBuilder::Direction direction,
   1061                                         HValue* increment_amount) {
   1062   Initialize(builder, context, direction, increment_amount);
   1063   increment_amount_ = increment_amount;
   1064 }
   1065 
   1066 
   1067 void HGraphBuilder::LoopBuilder::Initialize(HGraphBuilder* builder,
   1068                                             HValue* context,
   1069                                             Direction direction,
   1070                                             HValue* increment_amount) {
   1071   builder_ = builder;
   1072   context_ = context;
   1073   direction_ = direction;
   1074   increment_amount_ = increment_amount;
   1075 
   1076   finished_ = false;
   1077   header_block_ = builder->CreateLoopHeaderBlock();
   1078   body_block_ = NULL;
   1079   exit_block_ = NULL;
   1080   exit_trampoline_block_ = NULL;
   1081 }
   1082 
   1083 
   1084 HValue* HGraphBuilder::LoopBuilder::BeginBody(
   1085     HValue* initial,
   1086     HValue* terminating,
   1087     Token::Value token) {
   1088   DCHECK(direction_ != kWhileTrue);
   1089   HEnvironment* env = builder_->environment();
   1090   phi_ = header_block_->AddNewPhi(env->values()->length());
   1091   phi_->AddInput(initial);
   1092   env->Push(initial);
   1093   builder_->GotoNoSimulate(header_block_);
   1094 
   1095   HEnvironment* body_env = env->Copy();
   1096   HEnvironment* exit_env = env->Copy();
   1097   // Remove the phi from the expression stack
   1098   body_env->Pop();
   1099   exit_env->Pop();
   1100   body_block_ = builder_->CreateBasicBlock(body_env);
   1101   exit_block_ = builder_->CreateBasicBlock(exit_env);
   1102 
   1103   builder_->set_current_block(header_block_);
   1104   env->Pop();
   1105   builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
   1106           phi_, terminating, token, body_block_, exit_block_));
   1107 
   1108   builder_->set_current_block(body_block_);
   1109   if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
   1110     Isolate* isolate = builder_->isolate();
   1111     HValue* one = builder_->graph()->GetConstant1();
   1112     if (direction_ == kPreIncrement) {
   1113       increment_ = HAdd::New(isolate, zone(), context_, phi_, one);
   1114     } else {
   1115       increment_ = HSub::New(isolate, zone(), context_, phi_, one);
   1116     }
   1117     increment_->ClearFlag(HValue::kCanOverflow);
   1118     builder_->AddInstruction(increment_);
   1119     return increment_;
   1120   } else {
   1121     return phi_;
   1122   }
   1123 }
   1124 
   1125 
   1126 void HGraphBuilder::LoopBuilder::BeginBody(int drop_count) {
   1127   DCHECK(direction_ == kWhileTrue);
   1128   HEnvironment* env = builder_->environment();
   1129   builder_->GotoNoSimulate(header_block_);
   1130   builder_->set_current_block(header_block_);
   1131   env->Drop(drop_count);
   1132 }
   1133 
   1134 
   1135 void HGraphBuilder::LoopBuilder::Break() {
   1136   if (exit_trampoline_block_ == NULL) {
   1137     // It's the first time we've seen a break.
   1138     if (direction_ == kWhileTrue) {
   1139       HEnvironment* env = builder_->environment()->Copy();
   1140       exit_trampoline_block_ = builder_->CreateBasicBlock(env);
   1141     } else {
   1142       HEnvironment* env = exit_block_->last_environment()->Copy();
   1143       exit_trampoline_block_ = builder_->CreateBasicBlock(env);
   1144       builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
   1145     }
   1146   }
   1147 
   1148   builder_->GotoNoSimulate(exit_trampoline_block_);
   1149   builder_->set_current_block(NULL);
   1150 }
   1151 
   1152 
   1153 void HGraphBuilder::LoopBuilder::EndBody() {
   1154   DCHECK(!finished_);
   1155 
   1156   if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
   1157     Isolate* isolate = builder_->isolate();
   1158     if (direction_ == kPostIncrement) {
   1159       increment_ =
   1160           HAdd::New(isolate, zone(), context_, phi_, increment_amount_);
   1161     } else {
   1162       increment_ =
   1163           HSub::New(isolate, zone(), context_, phi_, increment_amount_);
   1164     }
   1165     increment_->ClearFlag(HValue::kCanOverflow);
   1166     builder_->AddInstruction(increment_);
   1167   }
   1168 
   1169   if (direction_ != kWhileTrue) {
   1170     // Push the new increment value on the expression stack to merge into
   1171     // the phi.
   1172     builder_->environment()->Push(increment_);
   1173   }
   1174   HBasicBlock* last_block = builder_->current_block();
   1175   builder_->GotoNoSimulate(last_block, header_block_);
   1176   header_block_->loop_information()->RegisterBackEdge(last_block);
   1177 
   1178   if (exit_trampoline_block_ != NULL) {
   1179     builder_->set_current_block(exit_trampoline_block_);
   1180   } else {
   1181     builder_->set_current_block(exit_block_);
   1182   }
   1183   finished_ = true;
   1184 }
   1185 
   1186 
   1187 HGraph* HGraphBuilder::CreateGraph() {
   1188   graph_ = new(zone()) HGraph(info_);
   1189   if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
   1190   CompilationPhase phase("H_Block building", info_);
   1191   set_current_block(graph()->entry_block());
   1192   if (!BuildGraph()) return NULL;
   1193   graph()->FinalizeUniqueness();
   1194   return graph_;
   1195 }
   1196 
   1197 
   1198 HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
   1199   DCHECK(current_block() != NULL);
   1200   DCHECK(!FLAG_hydrogen_track_positions ||
   1201          !position_.IsUnknown() ||
   1202          !info_->IsOptimizing());
   1203   current_block()->AddInstruction(instr, source_position());
   1204   if (graph()->IsInsideNoSideEffectsScope()) {
   1205     instr->SetFlag(HValue::kHasNoObservableSideEffects);
   1206   }
   1207   return instr;
   1208 }
   1209 
   1210 
   1211 void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
   1212   DCHECK(!FLAG_hydrogen_track_positions ||
   1213          !info_->IsOptimizing() ||
   1214          !position_.IsUnknown());
   1215   current_block()->Finish(last, source_position());
   1216   if (last->IsReturn() || last->IsAbnormalExit()) {
   1217     set_current_block(NULL);
   1218   }
   1219 }
   1220 
   1221 
   1222 void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
   1223   DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
   1224          !position_.IsUnknown());
   1225   current_block()->FinishExit(instruction, source_position());
   1226   if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
   1227     set_current_block(NULL);
   1228   }
   1229 }
   1230 
   1231 
   1232 void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
   1233   if (FLAG_native_code_counters && counter->Enabled()) {
   1234     HValue* reference = Add<HConstant>(ExternalReference(counter));
   1235     HValue* old_value =
   1236         Add<HLoadNamedField>(reference, nullptr, HObjectAccess::ForCounter());
   1237     HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
   1238     new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
   1239     Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
   1240                           new_value, STORE_TO_INITIALIZED_ENTRY);
   1241   }
   1242 }
   1243 
   1244 
   1245 void HGraphBuilder::AddSimulate(BailoutId id,
   1246                                 RemovableSimulate removable) {
   1247   DCHECK(current_block() != NULL);
   1248   DCHECK(!graph()->IsInsideNoSideEffectsScope());
   1249   current_block()->AddNewSimulate(id, source_position(), removable);
   1250 }
   1251 
   1252 
   1253 HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
   1254   HBasicBlock* b = graph()->CreateBasicBlock();
   1255   b->SetInitialEnvironment(env);
   1256   return b;
   1257 }
   1258 
   1259 
   1260 HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
   1261   HBasicBlock* header = graph()->CreateBasicBlock();
   1262   HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
   1263   header->SetInitialEnvironment(entry_env);
   1264   header->AttachLoopInformation();
   1265   return header;
   1266 }
   1267 
   1268 
   1269 HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
   1270   HValue* map = Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMap());
   1271 
   1272   HValue* bit_field2 =
   1273       Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
   1274   return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
   1275 }
   1276 
   1277 
   1278 HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
   1279   if (obj->type().IsHeapObject()) return obj;
   1280   return Add<HCheckHeapObject>(obj);
   1281 }
   1282 
   1283 
   1284 void HGraphBuilder::FinishExitWithHardDeoptimization(
   1285     Deoptimizer::DeoptReason reason) {
   1286   Add<HDeoptimize>(reason, Deoptimizer::EAGER);
   1287   FinishExitCurrentBlock(New<HAbnormalExit>());
   1288 }
   1289 
   1290 
   1291 HValue* HGraphBuilder::BuildCheckString(HValue* string) {
   1292   if (!string->type().IsString()) {
   1293     DCHECK(!string->IsConstant() ||
   1294            !HConstant::cast(string)->HasStringValue());
   1295     BuildCheckHeapObject(string);
   1296     return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
   1297   }
   1298   return string;
   1299 }
   1300 
   1301 
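        // Wraps a primitive receiver in a JSObject when required by the callee: if the
        // receiver is already a JSObject, or the callee is known to be a strict-mode
        // or native function, the receiver is returned unchanged; otherwise an
        // HWrapReceiver instruction performs the wrapping at run time.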
   1302 HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
   1303   if (object->type().IsJSObject()) return object;
   1304   if (function->IsConstant() &&
   1305       HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
   1306     Handle<JSFunction> f = Handle<JSFunction>::cast(
   1307         HConstant::cast(function)->handle(isolate()));
   1308     SharedFunctionInfo* shared = f->shared();
   1309     if (is_strict(shared->language_mode()) || shared->native()) return object;
   1310   }
   1311   return Add<HWrapReceiver>(object, function);
   1312 }
   1313 
   1314 
   1315 HValue* HGraphBuilder::BuildCheckAndGrowElementsCapacity(
   1316     HValue* object, HValue* elements, ElementsKind kind, HValue* length,
   1317     HValue* capacity, HValue* key) {
   1318   HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
   1319   HValue* max_capacity = AddUncasted<HAdd>(capacity, max_gap);
   1320   Add<HBoundsCheck>(key, max_capacity);
   1321 
   1322   HValue* new_capacity = BuildNewElementsCapacity(key);
   1323   HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind, kind,
   1324                                                    length, new_capacity);
   1325   return new_elements;
   1326 }
   1327 
   1328 
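        // Handles a keyed store that may be out of bounds: keys beyond the current
        // length (>= length for holey kinds, == length otherwise) grow the backing
        // store (inline when compiling a stub, via HMaybeGrowElements otherwise) and,
        // for JSArrays, bump the array length; in-bounds keys are just bounds-checked.
        // For smi-elements stores the freshly exposed slot is pre-initialized with 0.
        // Returns the (possibly reallocated) elements array.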
   1329 HValue* HGraphBuilder::BuildCheckForCapacityGrow(
   1330     HValue* object,
   1331     HValue* elements,
   1332     ElementsKind kind,
   1333     HValue* length,
   1334     HValue* key,
   1335     bool is_js_array,
   1336     PropertyAccessType access_type) {
   1337   IfBuilder length_checker(this);
   1338 
   1339   Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
   1340   length_checker.If<HCompareNumericAndBranch>(key, length, token);
   1341 
   1342   length_checker.Then();
   1343 
   1344   HValue* current_capacity = AddLoadFixedArrayLength(elements);
   1345 
   1346   if (top_info()->IsStub()) {
   1347     IfBuilder capacity_checker(this);
   1348     capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
   1349                                                   Token::GTE);
   1350     capacity_checker.Then();
   1351     HValue* new_elements = BuildCheckAndGrowElementsCapacity(
   1352         object, elements, kind, length, current_capacity, key);
   1353     environment()->Push(new_elements);
   1354     capacity_checker.Else();
   1355     environment()->Push(elements);
   1356     capacity_checker.End();
   1357   } else {
   1358     HValue* result = Add<HMaybeGrowElements>(
   1359         object, elements, key, current_capacity, is_js_array, kind);
   1360     environment()->Push(result);
   1361   }
   1362 
   1363   if (is_js_array) {
   1364     HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
   1365     new_length->ClearFlag(HValue::kCanOverflow);
   1366 
   1367     Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
   1368                           new_length);
   1369   }
   1370 
   1371   if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
   1372     HValue* checked_elements = environment()->Top();
   1373 
   1374     // Write zero to ensure that the new element is initialized with some smi.
   1375     Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), nullptr,
   1376                      kind);
   1377   }
   1378 
   1379   length_checker.Else();
   1380   Add<HBoundsCheck>(key, length);
   1381 
   1382   environment()->Push(elements);
   1383   length_checker.End();
   1384 
   1385   return environment()->Pop();
   1386 }
   1387 
   1388 
   1389 HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
   1390                                                 HValue* elements,
   1391                                                 ElementsKind kind,
   1392                                                 HValue* length) {
   1393   Factory* factory = isolate()->factory();
   1394 
   1395   IfBuilder cow_checker(this);
   1396 
   1397   cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
   1398   cow_checker.Then();
   1399 
   1400   HValue* capacity = AddLoadFixedArrayLength(elements);
   1401 
   1402   HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
   1403                                                    kind, length, capacity);
   1404 
   1405   environment()->Push(new_elements);
   1406 
   1407   cow_checker.Else();
   1408 
   1409   environment()->Push(elements);
   1410 
   1411   cow_checker.End();
   1412 
   1413   return environment()->Pop();
   1414 }
   1415 
   1416 
   1417 void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
   1418                                                 HValue* map,
   1419                                                 ElementsKind from_kind,
   1420                                                 ElementsKind to_kind,
   1421                                                 bool is_jsarray) {
   1422   DCHECK(!IsFastHoleyElementsKind(from_kind) ||
   1423          IsFastHoleyElementsKind(to_kind));
   1424 
   1425   if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
   1426     Add<HTrapAllocationMemento>(object);
   1427   }
   1428 
   1429   if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
   1430     HInstruction* elements = AddLoadElements(object);
   1431 
   1432     HInstruction* empty_fixed_array = Add<HConstant>(
   1433         isolate()->factory()->empty_fixed_array());
   1434 
   1435     IfBuilder if_builder(this);
   1436 
   1437     if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);
   1438 
   1439     if_builder.Then();
   1440 
   1441     HInstruction* elements_length = AddLoadFixedArrayLength(elements);
   1442 
   1443     HInstruction* array_length =
   1444         is_jsarray
   1445             ? Add<HLoadNamedField>(object, nullptr,
   1446                                    HObjectAccess::ForArrayLength(from_kind))
   1447             : elements_length;
   1448 
   1449     BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
   1450                               array_length, elements_length);
   1451 
   1452     if_builder.End();
   1453   }
   1454 
   1455   Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
   1456 }
   1457 
   1458 
   1459 void HGraphBuilder::BuildJSObjectCheck(HValue* receiver,
   1460                                        int bit_field_mask) {
   1461   // Check that the object isn't a smi.
   1462   Add<HCheckHeapObject>(receiver);
   1463 
   1464   // Get the map of the receiver.
   1465   HValue* map =
   1466       Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
   1467 
    1468   // Check the instance type and whether an access check is needed.  This
    1469   // can be done with a single load, since both bytes are adjacent in the map.
   1470   HObjectAccess access(HObjectAccess::ForMapInstanceTypeAndBitField());
   1471   HValue* instance_type_and_bit_field =
   1472       Add<HLoadNamedField>(map, nullptr, access);
   1473 
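           // The mask below keeps the whole instance type byte plus the requested
           // bit field bits.  After subtracting JS_OBJECT_TYPE, a single bounds
           // check succeeds only if the instance type lies in
           // [JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE] and none of the masked bit
           // field bits are set, since any such bit pushes the value above the
           // allowed range.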
   1474   HValue* mask = Add<HConstant>(0x00FF | (bit_field_mask << 8));
   1475   HValue* and_result = AddUncasted<HBitwise>(Token::BIT_AND,
   1476                                              instance_type_and_bit_field,
   1477                                              mask);
   1478   HValue* sub_result = AddUncasted<HSub>(and_result,
   1479                                          Add<HConstant>(JS_OBJECT_TYPE));
   1480   Add<HBoundsCheck>(sub_result,
   1481                     Add<HConstant>(LAST_JS_OBJECT_TYPE + 1 - JS_OBJECT_TYPE));
   1482 }
   1483 
   1484 
   1485 void HGraphBuilder::BuildKeyedIndexCheck(HValue* key,
   1486                                          HIfContinuation* join_continuation) {
    1487   // The ordering of the ifs below may look backward, but it is deliberate.
    1488   // All of the paths must guarantee that the if-true of the continuation
    1489   // returns a smi element index and the if-false of the continuation returns
    1490   // either a symbol or a unique string key.  All other object types cause a
    1491   // deopt to fall back to the runtime.
   1492 
   1493   IfBuilder key_smi_if(this);
   1494   key_smi_if.If<HIsSmiAndBranch>(key);
   1495   key_smi_if.Then();
   1496   {
   1497     Push(key);  // Nothing to do, just continue to true of continuation.
   1498   }
   1499   key_smi_if.Else();
   1500   {
   1501     HValue* map = Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForMap());
   1502     HValue* instance_type =
   1503         Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
   1504 
   1505     // Non-unique string, check for a string with a hash code that is actually
   1506     // an index.
   1507     STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
   1508     IfBuilder not_string_or_name_if(this);
   1509     not_string_or_name_if.If<HCompareNumericAndBranch>(
   1510         instance_type,
   1511         Add<HConstant>(LAST_UNIQUE_NAME_TYPE),
   1512         Token::GT);
   1513 
   1514     not_string_or_name_if.Then();
   1515     {
   1516       // Non-smi, non-Name, non-String: Try to convert to smi in case of
   1517       // HeapNumber.
   1518       // TODO(danno): This could call some variant of ToString
   1519       Push(AddUncasted<HForceRepresentation>(key, Representation::Smi()));
   1520     }
   1521     not_string_or_name_if.Else();
   1522     {
    1523       // String or Name: check explicitly for Name; Names can short-circuit
    1524       // directly to the unique non-index key path.
   1525       IfBuilder not_symbol_if(this);
   1526       not_symbol_if.If<HCompareNumericAndBranch>(
   1527           instance_type,
   1528           Add<HConstant>(SYMBOL_TYPE),
   1529           Token::NE);
   1530 
   1531       not_symbol_if.Then();
   1532       {
   1533         // String: check whether the String is a String of an index. If it is,
   1534         // extract the index value from the hash.
   1535         HValue* hash = Add<HLoadNamedField>(key, nullptr,
   1536                                             HObjectAccess::ForNameHashField());
   1537         HValue* not_index_mask = Add<HConstant>(static_cast<int>(
   1538             String::kContainsCachedArrayIndexMask));
   1539 
   1540         HValue* not_index_test = AddUncasted<HBitwise>(
   1541             Token::BIT_AND, hash, not_index_mask);
   1542 
   1543         IfBuilder string_index_if(this);
   1544         string_index_if.If<HCompareNumericAndBranch>(not_index_test,
   1545                                                      graph()->GetConstant0(),
   1546                                                      Token::EQ);
   1547         string_index_if.Then();
   1548         {
    1549           // String with index in hash: decode the index and merge to the index path.
   1550           Push(BuildDecodeField<String::ArrayIndexValueBits>(hash));
   1551         }
   1552         string_index_if.Else();
   1553         {
   1554           // Key is a non-index String, check for uniqueness/internalization.
   1555           // If it's not internalized yet, internalize it now.
   1556           HValue* not_internalized_bit = AddUncasted<HBitwise>(
   1557               Token::BIT_AND,
   1558               instance_type,
   1559               Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
   1560 
   1561           IfBuilder internalized(this);
   1562           internalized.If<HCompareNumericAndBranch>(not_internalized_bit,
   1563                                                     graph()->GetConstant0(),
   1564                                                     Token::EQ);
   1565           internalized.Then();
   1566           Push(key);
   1567 
   1568           internalized.Else();
   1569           Add<HPushArguments>(key);
   1570           HValue* intern_key = Add<HCallRuntime>(
   1571               Runtime::FunctionForId(Runtime::kInternalizeString), 1);
   1572           Push(intern_key);
   1573 
   1574           internalized.End();
   1575           // Key guaranteed to be a unique string
   1576         }
   1577         string_index_if.JoinContinuation(join_continuation);
   1578       }
   1579       not_symbol_if.Else();
   1580       {
   1581         Push(key);  // Key is symbol
   1582       }
   1583       not_symbol_if.JoinContinuation(join_continuation);
   1584     }
   1585     not_string_or_name_if.JoinContinuation(join_continuation);
   1586   }
   1587   key_smi_if.JoinContinuation(join_continuation);
   1588 }
   1589 
   1590 
   1591 void HGraphBuilder::BuildNonGlobalObjectCheck(HValue* receiver) {
    1592   // Get the instance type of the receiver, and make sure that it is
   1593   // not one of the global object types.
   1594   HValue* map =
   1595       Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
   1596   HValue* instance_type =
   1597       Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
   1598   HValue* global_type = Add<HConstant>(JS_GLOBAL_OBJECT_TYPE);
   1599 
   1600   IfBuilder if_global_object(this);
   1601   if_global_object.If<HCompareNumericAndBranch>(instance_type, global_type,
   1602                                                 Token::EQ);
   1603   if_global_object.ThenDeopt(Deoptimizer::kReceiverWasAGlobalObject);
   1604   if_global_object.End();
   1605 }
   1606 
   1607 
   1608 void HGraphBuilder::BuildTestForDictionaryProperties(
   1609     HValue* object,
   1610     HIfContinuation* continuation) {
   1611   HValue* properties = Add<HLoadNamedField>(
   1612       object, nullptr, HObjectAccess::ForPropertiesPointer());
   1613   HValue* properties_map =
   1614       Add<HLoadNamedField>(properties, nullptr, HObjectAccess::ForMap());
   1615   HValue* hash_map = Add<HLoadRoot>(Heap::kHashTableMapRootIndex);
   1616   IfBuilder builder(this);
   1617   builder.If<HCompareObjectEqAndBranch>(properties_map, hash_map);
   1618   builder.CaptureContinuation(continuation);
   1619 }
   1620 
   1621 
   1622 HValue* HGraphBuilder::BuildKeyedLookupCacheHash(HValue* object,
   1623                                                  HValue* key) {
   1624   // Load the map of the receiver, compute the keyed lookup cache hash
   1625   // based on 32 bits of the map pointer and the string hash.
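           // I.e. hash = ((map >> KeyedLookupCache::kMapHashShift) ^
           //              (string_hash >> String::kHashShift)) &
           //             (kCapacityMask & kHashMask)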
   1626   HValue* object_map =
   1627       Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMapAsInteger32());
   1628   HValue* shifted_map = AddUncasted<HShr>(
   1629       object_map, Add<HConstant>(KeyedLookupCache::kMapHashShift));
   1630   HValue* string_hash =
   1631       Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForStringHashField());
   1632   HValue* shifted_hash = AddUncasted<HShr>(
   1633       string_hash, Add<HConstant>(String::kHashShift));
   1634   HValue* xor_result = AddUncasted<HBitwise>(Token::BIT_XOR, shifted_map,
   1635                                              shifted_hash);
   1636   int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
   1637   return AddUncasted<HBitwise>(Token::BIT_AND, xor_result,
   1638                                Add<HConstant>(mask));
   1639 }
   1640 
   1641 
   1642 HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
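           // Seeded bit-mixing hash for an integer element index.  The heap's hash
           // seed is truncated to 32 bits; the mixing steps below are meant to
           // mirror the runtime's integer hash used when seeded number dictionaries
           // are populated (cf. ComputeIntegerHash).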
   1643   int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
   1644   HValue* seed = Add<HConstant>(seed_value);
   1645   HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);
   1646 
   1647   // hash = ~hash + (hash << 15);
   1648   HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
   1649   HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
   1650                                            graph()->GetConstantMinus1());
   1651   hash = AddUncasted<HAdd>(shifted_hash, not_hash);
   1652 
   1653   // hash = hash ^ (hash >> 12);
   1654   shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
   1655   hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
   1656 
   1657   // hash = hash + (hash << 2);
   1658   shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
   1659   hash = AddUncasted<HAdd>(hash, shifted_hash);
   1660 
   1661   // hash = hash ^ (hash >> 4);
   1662   shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
   1663   hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
   1664 
   1665   // hash = hash * 2057;
   1666   hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
   1667   hash->ClearFlag(HValue::kCanOverflow);
   1668 
   1669   // hash = hash ^ (hash >> 16);
   1670   shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
   1671   return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
   1672 }
   1673 
   1674 
   1675 HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(
   1676     HValue* receiver, HValue* elements, HValue* key, HValue* hash,
   1677     LanguageMode language_mode) {
   1678   HValue* capacity =
   1679       Add<HLoadKeyed>(elements, Add<HConstant>(NameDictionary::kCapacityIndex),
   1680                       nullptr, nullptr, FAST_ELEMENTS);
   1681 
   1682   HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
   1683   mask->ChangeRepresentation(Representation::Integer32());
   1684   mask->ClearFlag(HValue::kCanOverflow);
   1685 
   1686   HValue* entry = hash;
   1687   HValue* count = graph()->GetConstant1();
   1688   Push(entry);
   1689   Push(count);
   1690 
   1691   HIfContinuation return_or_loop_continuation(graph()->CreateBasicBlock(),
   1692                                               graph()->CreateBasicBlock());
   1693   HIfContinuation found_key_match_continuation(graph()->CreateBasicBlock(),
   1694                                                graph()->CreateBasicBlock());
   1695   LoopBuilder probe_loop(this);
   1696   probe_loop.BeginBody(2);  // Drop entry, count from last environment to
   1697                             // appease live range building without simulates.
   1698 
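           // Open-addressed probe loop: on a miss the probe advances by an
           // increasing step (count), i.e. it visits hash, hash + 1, hash + 3,
           // hash + 6, ... masked by the capacity, which is intended to match the
           // dictionary's own probing sequence (cf. HashTable::NextProbe).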
   1699   count = Pop();
   1700   entry = Pop();
   1701   entry = AddUncasted<HBitwise>(Token::BIT_AND, entry, mask);
   1702   int entry_size = SeededNumberDictionary::kEntrySize;
   1703   HValue* base_index = AddUncasted<HMul>(entry, Add<HConstant>(entry_size));
   1704   base_index->ClearFlag(HValue::kCanOverflow);
   1705   int start_offset = SeededNumberDictionary::kElementsStartIndex;
   1706   HValue* key_index =
   1707       AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset));
   1708   key_index->ClearFlag(HValue::kCanOverflow);
   1709 
   1710   HValue* candidate_key =
   1711       Add<HLoadKeyed>(elements, key_index, nullptr, nullptr, FAST_ELEMENTS);
   1712   IfBuilder if_undefined(this);
   1713   if_undefined.If<HCompareObjectEqAndBranch>(candidate_key,
   1714                                              graph()->GetConstantUndefined());
   1715   if_undefined.Then();
   1716   {
   1717     // element == undefined means "not found". Call the runtime.
   1718     // TODO(jkummerow): walk the prototype chain instead.
   1719     Add<HPushArguments>(receiver, key);
   1720     Push(Add<HCallRuntime>(
   1721         Runtime::FunctionForId(is_strong(language_mode)
   1722                                    ? Runtime::kKeyedGetPropertyStrong
   1723                                    : Runtime::kKeyedGetProperty),
   1724         2));
   1725   }
   1726   if_undefined.Else();
   1727   {
   1728     IfBuilder if_match(this);
   1729     if_match.If<HCompareObjectEqAndBranch>(candidate_key, key);
   1730     if_match.Then();
   1731     if_match.Else();
   1732 
   1733     // Update non-internalized string in the dictionary with internalized key?
   1734     IfBuilder if_update_with_internalized(this);
   1735     HValue* smi_check =
   1736         if_update_with_internalized.IfNot<HIsSmiAndBranch>(candidate_key);
   1737     if_update_with_internalized.And();
   1738     HValue* map = AddLoadMap(candidate_key, smi_check);
   1739     HValue* instance_type =
   1740         Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
   1741     HValue* not_internalized_bit = AddUncasted<HBitwise>(
   1742         Token::BIT_AND, instance_type,
   1743         Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
   1744     if_update_with_internalized.If<HCompareNumericAndBranch>(
   1745         not_internalized_bit, graph()->GetConstant0(), Token::NE);
   1746     if_update_with_internalized.And();
   1747     if_update_with_internalized.IfNot<HCompareObjectEqAndBranch>(
   1748         candidate_key, graph()->GetConstantHole());
   1749     if_update_with_internalized.AndIf<HStringCompareAndBranch>(candidate_key,
   1750                                                                key, Token::EQ);
   1751     if_update_with_internalized.Then();
   1752     // Replace a key that is a non-internalized string by the equivalent
   1753     // internalized string for faster further lookups.
   1754     Add<HStoreKeyed>(elements, key_index, key, nullptr, FAST_ELEMENTS);
   1755     if_update_with_internalized.Else();
   1756 
   1757     if_update_with_internalized.JoinContinuation(&found_key_match_continuation);
   1758     if_match.JoinContinuation(&found_key_match_continuation);
   1759 
   1760     IfBuilder found_key_match(this, &found_key_match_continuation);
   1761     found_key_match.Then();
   1762     // Key at current probe matches. Relevant bits in the |details| field must
   1763     // be zero, otherwise the dictionary element requires special handling.
   1764     HValue* details_index =
   1765         AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 2));
   1766     details_index->ClearFlag(HValue::kCanOverflow);
   1767     HValue* details = Add<HLoadKeyed>(elements, details_index, nullptr, nullptr,
   1768                                       FAST_ELEMENTS);
   1769     int details_mask = PropertyDetails::TypeField::kMask;
   1770     details = AddUncasted<HBitwise>(Token::BIT_AND, details,
   1771                                     Add<HConstant>(details_mask));
   1772     IfBuilder details_compare(this);
   1773     details_compare.If<HCompareNumericAndBranch>(
   1774         details, graph()->GetConstant0(), Token::EQ);
   1775     details_compare.Then();
   1776     HValue* result_index =
   1777         AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 1));
   1778     result_index->ClearFlag(HValue::kCanOverflow);
   1779     Push(Add<HLoadKeyed>(elements, result_index, nullptr, nullptr,
   1780                          FAST_ELEMENTS));
   1781     details_compare.Else();
   1782     Add<HPushArguments>(receiver, key);
   1783     Push(Add<HCallRuntime>(
   1784         Runtime::FunctionForId(is_strong(language_mode)
   1785                                    ? Runtime::kKeyedGetPropertyStrong
   1786                                    : Runtime::kKeyedGetProperty),
   1787         2));
   1788     details_compare.End();
   1789 
   1790     found_key_match.Else();
   1791     found_key_match.JoinContinuation(&return_or_loop_continuation);
   1792   }
   1793   if_undefined.JoinContinuation(&return_or_loop_continuation);
   1794 
   1795   IfBuilder return_or_loop(this, &return_or_loop_continuation);
   1796   return_or_loop.Then();
   1797   probe_loop.Break();
   1798 
   1799   return_or_loop.Else();
   1800   entry = AddUncasted<HAdd>(entry, count);
   1801   entry->ClearFlag(HValue::kCanOverflow);
   1802   count = AddUncasted<HAdd>(count, graph()->GetConstant1());
   1803   count->ClearFlag(HValue::kCanOverflow);
   1804   Push(entry);
   1805   Push(count);
   1806 
   1807   probe_loop.EndBody();
   1808 
   1809   return_or_loop.End();
   1810 
   1811   return Pop();
   1812 }
   1813 
   1814 
   1815 HValue* HGraphBuilder::BuildCreateIterResultObject(HValue* value,
   1816                                                    HValue* done) {
   1817   NoObservableSideEffectsScope scope(this);
   1818 
   1819   // Allocate the JSIteratorResult object.
   1820   HValue* result =
   1821       Add<HAllocate>(Add<HConstant>(JSIteratorResult::kSize), HType::JSObject(),
   1822                      NOT_TENURED, JS_ITERATOR_RESULT_TYPE);
   1823 
   1824   // Initialize the JSIteratorResult object.
   1825   HValue* native_context = BuildGetNativeContext();
   1826   HValue* map = Add<HLoadNamedField>(
   1827       native_context, nullptr,
   1828       HObjectAccess::ForContextSlot(Context::ITERATOR_RESULT_MAP_INDEX));
   1829   Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
   1830   HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
   1831   Add<HStoreNamedField>(result, HObjectAccess::ForPropertiesPointer(),
   1832                         empty_fixed_array);
   1833   Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
   1834                         empty_fixed_array);
   1835   Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
   1836                                     JSIteratorResult::kValueOffset),
   1837                         value);
   1838   Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
   1839                                     JSIteratorResult::kDoneOffset),
   1840                         done);
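           // The result consists of exactly five pointer-sized fields: map,
           // properties, elements, value and done, matching the size assertion
           // below.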
   1841   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
   1842   return result;
   1843 }
   1844 
   1845 
   1846 HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
   1847                                                   HValue* index,
   1848                                                   HValue* input) {
   1849   NoObservableSideEffectsScope scope(this);
   1850   HConstant* max_length = Add<HConstant>(JSArray::kInitialMaxFastElementArray);
   1851   Add<HBoundsCheck>(length, max_length);
   1852 
   1853   // Generate size calculation code here in order to make it dominate
   1854   // the JSRegExpResult allocation.
   1855   ElementsKind elements_kind = FAST_ELEMENTS;
   1856   HValue* size = BuildCalculateElementsSize(elements_kind, length);
   1857 
   1858   // Allocate the JSRegExpResult and the FixedArray in one step.
   1859   HValue* result = Add<HAllocate>(
   1860       Add<HConstant>(JSRegExpResult::kSize), HType::JSArray(),
   1861       NOT_TENURED, JS_ARRAY_TYPE);
   1862 
   1863   // Initialize the JSRegExpResult header.
   1864   HValue* native_context = Add<HLoadNamedField>(
   1865       context(), nullptr,
   1866       HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
   1867   Add<HStoreNamedField>(
   1868       result, HObjectAccess::ForMap(),
   1869       Add<HLoadNamedField>(
   1870           native_context, nullptr,
   1871           HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
   1872   HConstant* empty_fixed_array =
   1873       Add<HConstant>(isolate()->factory()->empty_fixed_array());
   1874   Add<HStoreNamedField>(
   1875       result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
   1876       empty_fixed_array);
   1877   Add<HStoreNamedField>(
   1878       result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
   1879       empty_fixed_array);
   1880   Add<HStoreNamedField>(
   1881       result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length);
   1882 
   1883   // Initialize the additional fields.
   1884   Add<HStoreNamedField>(
   1885       result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
   1886       index);
   1887   Add<HStoreNamedField>(
   1888       result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
   1889       input);
   1890 
   1891   // Allocate and initialize the elements header.
   1892   HAllocate* elements = BuildAllocateElements(elements_kind, size);
   1893   BuildInitializeElementsHeader(elements, elements_kind, length);
   1894 
   1895   if (!elements->has_size_upper_bound()) {
   1896     HConstant* size_in_bytes_upper_bound = EstablishElementsAllocationSize(
   1897         elements_kind, max_length->Integer32Value());
   1898     elements->set_size_upper_bound(size_in_bytes_upper_bound);
   1899   }
   1900 
   1901   Add<HStoreNamedField>(
   1902       result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
   1903       elements);
   1904 
   1905   // Initialize the elements contents with undefined.
   1906   BuildFillElementsWithValue(
   1907       elements, elements_kind, graph()->GetConstant0(), length,
   1908       graph()->GetConstantUndefined());
   1909 
   1910   return result;
   1911 }
   1912 
   1913 
   1914 HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
   1915   NoObservableSideEffectsScope scope(this);
   1916 
   1917   // Convert constant numbers at compile time.
   1918   if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
   1919     Handle<Object> number = HConstant::cast(object)->handle(isolate());
   1920     Handle<String> result = isolate()->factory()->NumberToString(number);
   1921     return Add<HConstant>(result);
   1922   }
   1923 
   1924   // Create a joinable continuation.
   1925   HIfContinuation found(graph()->CreateBasicBlock(),
   1926                         graph()->CreateBasicBlock());
   1927 
   1928   // Load the number string cache.
   1929   HValue* number_string_cache =
   1930       Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);
   1931 
   1932   // Make the hash mask from the length of the number string cache. It
   1933   // contains two elements (number and string) for each cache entry.
   1934   HValue* mask = AddLoadFixedArrayLength(number_string_cache);
   1935   mask->set_type(HType::Smi());
   1936   mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
   1937   mask = AddUncasted<HSub>(mask, graph()->GetConstant1());
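           // I.e. mask = (cache length / 2) - 1, the number of cache entries minus
           // one; the entry count is presumably a power of two, so masking the hash
           // with it yields a valid entry index.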
   1938 
   1939   // Check whether object is a smi.
   1940   IfBuilder if_objectissmi(this);
   1941   if_objectissmi.If<HIsSmiAndBranch>(object);
   1942   if_objectissmi.Then();
   1943   {
   1944     // Compute hash for smi similar to smi_get_hash().
   1945     HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);
   1946 
   1947     // Load the key.
   1948     HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
   1949     HValue* key = Add<HLoadKeyed>(number_string_cache, key_index, nullptr,
   1950                                   nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);
   1951 
   1952     // Check if object == key.
   1953     IfBuilder if_objectiskey(this);
   1954     if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
   1955     if_objectiskey.Then();
   1956     {
   1957       // Make the key_index available.
   1958       Push(key_index);
   1959     }
   1960     if_objectiskey.JoinContinuation(&found);
   1961   }
   1962   if_objectissmi.Else();
   1963   {
   1964     if (type->Is(Type::SignedSmall())) {
   1965       if_objectissmi.Deopt(Deoptimizer::kExpectedSmi);
   1966     } else {
   1967       // Check if the object is a heap number.
   1968       IfBuilder if_objectisnumber(this);
   1969       HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
   1970           object, isolate()->factory()->heap_number_map());
   1971       if_objectisnumber.Then();
   1972       {
   1973         // Compute hash for heap number similar to double_get_hash().
   1974         HValue* low = Add<HLoadNamedField>(
   1975             object, objectisnumber,
   1976             HObjectAccess::ForHeapNumberValueLowestBits());
   1977         HValue* high = Add<HLoadNamedField>(
   1978             object, objectisnumber,
   1979             HObjectAccess::ForHeapNumberValueHighestBits());
   1980         HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
   1981         hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);
   1982 
   1983         // Load the key.
   1984         HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
   1985         HValue* key =
   1986             Add<HLoadKeyed>(number_string_cache, key_index, nullptr, nullptr,
   1987                             FAST_ELEMENTS, ALLOW_RETURN_HOLE);
   1988 
   1989         // Check if the key is a heap number and compare it with the object.
   1990         IfBuilder if_keyisnotsmi(this);
   1991         HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
   1992         if_keyisnotsmi.Then();
   1993         {
   1994           IfBuilder if_keyisheapnumber(this);
   1995           if_keyisheapnumber.If<HCompareMap>(
   1996               key, isolate()->factory()->heap_number_map());
   1997           if_keyisheapnumber.Then();
   1998           {
   1999             // Check if values of key and object match.
   2000             IfBuilder if_keyeqobject(this);
   2001             if_keyeqobject.If<HCompareNumericAndBranch>(
   2002                 Add<HLoadNamedField>(key, keyisnotsmi,
   2003                                      HObjectAccess::ForHeapNumberValue()),
   2004                 Add<HLoadNamedField>(object, objectisnumber,
   2005                                      HObjectAccess::ForHeapNumberValue()),
   2006                 Token::EQ);
   2007             if_keyeqobject.Then();
   2008             {
   2009               // Make the key_index available.
   2010               Push(key_index);
   2011             }
   2012             if_keyeqobject.JoinContinuation(&found);
   2013           }
   2014           if_keyisheapnumber.JoinContinuation(&found);
   2015         }
   2016         if_keyisnotsmi.JoinContinuation(&found);
   2017       }
   2018       if_objectisnumber.Else();
   2019       {
   2020         if (type->Is(Type::Number())) {
   2021           if_objectisnumber.Deopt(Deoptimizer::kExpectedHeapNumber);
   2022         }
   2023       }
   2024       if_objectisnumber.JoinContinuation(&found);
   2025     }
   2026   }
   2027   if_objectissmi.JoinContinuation(&found);
   2028 
   2029   // Check for cache hit.
   2030   IfBuilder if_found(this, &found);
   2031   if_found.Then();
   2032   {
   2033     // Count number to string operation in native code.
   2034     AddIncrementCounter(isolate()->counters()->number_to_string_native());
   2035 
   2036     // Load the value in case of cache hit.
   2037     HValue* key_index = Pop();
   2038     HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
   2039     Push(Add<HLoadKeyed>(number_string_cache, value_index, nullptr, nullptr,
   2040                          FAST_ELEMENTS, ALLOW_RETURN_HOLE));
   2041   }
   2042   if_found.Else();
   2043   {
    2044     // Cache miss, fall back to the runtime.
   2045     Add<HPushArguments>(object);
   2046     Push(Add<HCallRuntime>(
   2047             Runtime::FunctionForId(Runtime::kNumberToStringSkipCache),
   2048             1));
   2049   }
   2050   if_found.End();
   2051 
   2052   return Pop();
   2053 }
   2054 
   2055 
   2056 HValue* HGraphBuilder::BuildToObject(HValue* receiver) {
   2057   NoObservableSideEffectsScope scope(this);
   2058 
   2059   // Create a joinable continuation.
   2060   HIfContinuation wrap(graph()->CreateBasicBlock(),
   2061                        graph()->CreateBasicBlock());
   2062 
   2063   // Determine the proper global constructor function required to wrap
   2064   // {receiver} into a JSValue, unless {receiver} is already a {JSReceiver}, in
   2065   // which case we just return it.  Deopts to Runtime::kToObject if {receiver}
   2066   // is undefined or null.
   2067   IfBuilder receiver_is_smi(this);
   2068   receiver_is_smi.If<HIsSmiAndBranch>(receiver);
   2069   receiver_is_smi.Then();
   2070   {
   2071     // Use global Number function.
   2072     Push(Add<HConstant>(Context::NUMBER_FUNCTION_INDEX));
   2073   }
   2074   receiver_is_smi.Else();
   2075   {
   2076     // Determine {receiver} map and instance type.
   2077     HValue* receiver_map =
   2078         Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
   2079     HValue* receiver_instance_type = Add<HLoadNamedField>(
   2080         receiver_map, nullptr, HObjectAccess::ForMapInstanceType());
   2081 
   2082     // First check whether {receiver} is already a spec object (fast case).
   2083     IfBuilder receiver_is_not_spec_object(this);
   2084     receiver_is_not_spec_object.If<HCompareNumericAndBranch>(
   2085         receiver_instance_type, Add<HConstant>(FIRST_JS_RECEIVER_TYPE),
   2086         Token::LT);
   2087     receiver_is_not_spec_object.Then();
   2088     {
   2089       // Load the constructor function index from the {receiver} map.
   2090       HValue* constructor_function_index = Add<HLoadNamedField>(
   2091           receiver_map, nullptr,
   2092           HObjectAccess::ForMapInObjectPropertiesOrConstructorFunctionIndex());
   2093 
   2094       // Check if {receiver} has a constructor (null and undefined have no
   2095       // constructors, so we deoptimize to the runtime to throw an exception).
   2096       IfBuilder constructor_function_index_is_invalid(this);
   2097       constructor_function_index_is_invalid.If<HCompareNumericAndBranch>(
   2098           constructor_function_index,
   2099           Add<HConstant>(Map::kNoConstructorFunctionIndex), Token::EQ);
   2100       constructor_function_index_is_invalid.ThenDeopt(
   2101           Deoptimizer::kUndefinedOrNullInToObject);
   2102       constructor_function_index_is_invalid.End();
   2103 
   2104       // Use the global constructor function.
   2105       Push(constructor_function_index);
   2106     }
   2107     receiver_is_not_spec_object.JoinContinuation(&wrap);
   2108   }
   2109   receiver_is_smi.JoinContinuation(&wrap);
   2110 
   2111   // Wrap the receiver if necessary.
   2112   IfBuilder if_wrap(this, &wrap);
   2113   if_wrap.Then();
   2114   {
   2115     // Grab the constructor function index.
   2116     HValue* constructor_index = Pop();
   2117 
   2118     // Load native context.
   2119     HValue* native_context = BuildGetNativeContext();
   2120 
   2121     // Determine the initial map for the global constructor.
   2122     HValue* constructor = Add<HLoadKeyed>(native_context, constructor_index,
   2123                                           nullptr, nullptr, FAST_ELEMENTS);
   2124     HValue* constructor_initial_map = Add<HLoadNamedField>(
   2125         constructor, nullptr, HObjectAccess::ForPrototypeOrInitialMap());
   2126     // Allocate and initialize a JSValue wrapper.
   2127     HValue* value =
   2128         BuildAllocate(Add<HConstant>(JSValue::kSize), HType::JSObject(),
   2129                       JS_VALUE_TYPE, HAllocationMode());
   2130     Add<HStoreNamedField>(value, HObjectAccess::ForMap(),
   2131                           constructor_initial_map);
   2132     HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
   2133     Add<HStoreNamedField>(value, HObjectAccess::ForPropertiesPointer(),
   2134                           empty_fixed_array);
   2135     Add<HStoreNamedField>(value, HObjectAccess::ForElementsPointer(),
   2136                           empty_fixed_array);
   2137     Add<HStoreNamedField>(value, HObjectAccess::ForObservableJSObjectOffset(
   2138                                      JSValue::kValueOffset),
   2139                           receiver);
   2140     Push(value);
   2141   }
   2142   if_wrap.Else();
   2143   { Push(receiver); }
   2144   if_wrap.End();
   2145   return Pop();
   2146 }
   2147 
   2148 
   2149 HAllocate* HGraphBuilder::BuildAllocate(
   2150     HValue* object_size,
   2151     HType type,
   2152     InstanceType instance_type,
   2153     HAllocationMode allocation_mode) {
   2154   // Compute the effective allocation size.
   2155   HValue* size = object_size;
   2156   if (allocation_mode.CreateAllocationMementos()) {
   2157     size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
   2158     size->ClearFlag(HValue::kCanOverflow);
   2159   }
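           // If an allocation memento is requested, it is laid out directly behind
           // the object, which is why its size is folded into the allocation size
           // here and the original object_size is used as the memento's offset when
           // it is created below.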
   2160 
   2161   // Perform the actual allocation.
   2162   HAllocate* object = Add<HAllocate>(
   2163       size, type, allocation_mode.GetPretenureMode(),
   2164       instance_type, allocation_mode.feedback_site());
   2165 
   2166   // Setup the allocation memento.
   2167   if (allocation_mode.CreateAllocationMementos()) {
   2168     BuildCreateAllocationMemento(
   2169         object, object_size, allocation_mode.current_site());
   2170   }
   2171 
   2172   return object;
   2173 }
   2174 
   2175 
   2176 HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
   2177                                              HValue* right_length) {
   2178   // Compute the combined string length and check against max string length.
   2179   HValue* length = AddUncasted<HAdd>(left_length, right_length);
    2180   // Check that length <= kMaxLength <=> length < kMaxLength + 1.
   2181   HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
   2182   Add<HBoundsCheck>(length, max_length);
   2183   return length;
   2184 }
   2185 
   2186 
   2187 HValue* HGraphBuilder::BuildCreateConsString(
   2188     HValue* length,
   2189     HValue* left,
   2190     HValue* right,
   2191     HAllocationMode allocation_mode) {
   2192   // Determine the string instance types.
   2193   HInstruction* left_instance_type = AddLoadStringInstanceType(left);
   2194   HInstruction* right_instance_type = AddLoadStringInstanceType(right);
   2195 
   2196   // Allocate the cons string object. HAllocate does not care whether we
   2197   // pass CONS_STRING_TYPE or CONS_ONE_BYTE_STRING_TYPE here, so we just use
    2198   // CONS_STRING_TYPE.  Below we decide whether the cons string is
   2199   // one-byte or two-byte and set the appropriate map.
   2200   DCHECK(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
   2201                                             CONS_ONE_BYTE_STRING_TYPE));
   2202   HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
   2203                                     HType::String(), CONS_STRING_TYPE,
   2204                                     allocation_mode);
   2205 
   2206   // Compute intersection and difference of instance types.
   2207   HValue* anded_instance_types = AddUncasted<HBitwise>(
   2208       Token::BIT_AND, left_instance_type, right_instance_type);
   2209   HValue* xored_instance_types = AddUncasted<HBitwise>(
   2210       Token::BIT_XOR, left_instance_type, right_instance_type);
   2211 
   2212   // We create a one-byte cons string if
   2213   // 1. both strings are one-byte, or
   2214   // 2. at least one of the strings is two-byte, but happens to contain only
   2215   //    one-byte characters.
   2216   // To do this, we check
   2217   // 1. if both strings are one-byte, or if the one-byte data hint is set in
   2218   //    both strings, or
   2219   // 2. if one of the strings has the one-byte data hint set and the other
   2220   //    string is one-byte.
   2221   IfBuilder if_onebyte(this);
   2222   STATIC_ASSERT(kOneByteStringTag != 0);
   2223   STATIC_ASSERT(kOneByteDataHintMask != 0);
   2224   if_onebyte.If<HCompareNumericAndBranch>(
   2225       AddUncasted<HBitwise>(
   2226           Token::BIT_AND, anded_instance_types,
   2227           Add<HConstant>(static_cast<int32_t>(
   2228                   kStringEncodingMask | kOneByteDataHintMask))),
   2229       graph()->GetConstant0(), Token::NE);
   2230   if_onebyte.Or();
   2231   STATIC_ASSERT(kOneByteStringTag != 0 &&
   2232                 kOneByteDataHintTag != 0 &&
   2233                 kOneByteDataHintTag != kOneByteStringTag);
   2234   if_onebyte.If<HCompareNumericAndBranch>(
   2235       AddUncasted<HBitwise>(
   2236           Token::BIT_AND, xored_instance_types,
   2237           Add<HConstant>(static_cast<int32_t>(
   2238                   kOneByteStringTag | kOneByteDataHintTag))),
   2239       Add<HConstant>(static_cast<int32_t>(
   2240               kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
   2241   if_onebyte.Then();
   2242   {
   2243     // We can safely skip the write barrier for storing the map here.
   2244     Add<HStoreNamedField>(
   2245         result, HObjectAccess::ForMap(),
   2246         Add<HConstant>(isolate()->factory()->cons_one_byte_string_map()));
   2247   }
   2248   if_onebyte.Else();
   2249   {
   2250     // We can safely skip the write barrier for storing the map here.
   2251     Add<HStoreNamedField>(
   2252         result, HObjectAccess::ForMap(),
   2253         Add<HConstant>(isolate()->factory()->cons_string_map()));
   2254   }
   2255   if_onebyte.End();
   2256 
   2257   // Initialize the cons string fields.
   2258   Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
   2259                         Add<HConstant>(String::kEmptyHashField));
   2260   Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
   2261   Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
   2262   Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);
   2263 
   2264   // Count the native string addition.
   2265   AddIncrementCounter(isolate()->counters()->string_add_native());
   2266 
   2267   return result;
   2268 }
   2269 
   2270 
   2271 void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
   2272                                             HValue* src_offset,
   2273                                             String::Encoding src_encoding,
   2274                                             HValue* dst,
   2275                                             HValue* dst_offset,
   2276                                             String::Encoding dst_encoding,
   2277                                             HValue* length) {
   2278   DCHECK(dst_encoding != String::ONE_BYTE_ENCODING ||
   2279          src_encoding == String::ONE_BYTE_ENCODING);
   2280   LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
   2281   HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
   2282   {
   2283     HValue* src_index = AddUncasted<HAdd>(src_offset, index);
   2284     HValue* value =
   2285         AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
   2286     HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
   2287     Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
   2288   }
   2289   loop.EndBody();
   2290 }
   2291 
   2292 
   2293 HValue* HGraphBuilder::BuildObjectSizeAlignment(
   2294     HValue* unaligned_size, int header_size) {
   2295   DCHECK((header_size & kObjectAlignmentMask) == 0);
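           // Rounds (header_size + unaligned_size) up to the next multiple of the
           // object alignment: add the header plus (alignment - 1), then clear the
           // low alignment bits.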
   2296   HValue* size = AddUncasted<HAdd>(
   2297       unaligned_size, Add<HConstant>(static_cast<int32_t>(
   2298           header_size + kObjectAlignmentMask)));
   2299   size->ClearFlag(HValue::kCanOverflow);
   2300   return AddUncasted<HBitwise>(
   2301       Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
   2302           ~kObjectAlignmentMask)));
   2303 }
   2304 
   2305 
   2306 HValue* HGraphBuilder::BuildUncheckedStringAdd(
   2307     HValue* left,
   2308     HValue* right,
   2309     HAllocationMode allocation_mode) {
   2310   // Determine the string lengths.
   2311   HValue* left_length = AddLoadStringLength(left);
   2312   HValue* right_length = AddLoadStringLength(right);
   2313 
   2314   // Compute the combined string length.
   2315   HValue* length = BuildAddStringLengths(left_length, right_length);
   2316 
   2317   // Do some manual constant folding here.
   2318   if (left_length->IsConstant()) {
   2319     HConstant* c_left_length = HConstant::cast(left_length);
   2320     DCHECK_NE(0, c_left_length->Integer32Value());
   2321     if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
    2322       // The right string contains at least one character, so length >= kMinLength.
   2323       return BuildCreateConsString(length, left, right, allocation_mode);
   2324     }
   2325   } else if (right_length->IsConstant()) {
   2326     HConstant* c_right_length = HConstant::cast(right_length);
   2327     DCHECK_NE(0, c_right_length->Integer32Value());
   2328     if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
    2329       // The left string contains at least one character, so length >= kMinLength.
   2330       return BuildCreateConsString(length, left, right, allocation_mode);
   2331     }
   2332   }
   2333 
   2334   // Check if we should create a cons string.
   2335   IfBuilder if_createcons(this);
   2336   if_createcons.If<HCompareNumericAndBranch>(
   2337       length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
   2338   if_createcons.Then();
   2339   {
   2340     // Create a cons string.
   2341     Push(BuildCreateConsString(length, left, right, allocation_mode));
   2342   }
   2343   if_createcons.Else();
   2344   {
   2345     // Determine the string instance types.
   2346     HValue* left_instance_type = AddLoadStringInstanceType(left);
   2347     HValue* right_instance_type = AddLoadStringInstanceType(right);
   2348 
   2349     // Compute union and difference of instance types.
   2350     HValue* ored_instance_types = AddUncasted<HBitwise>(
   2351         Token::BIT_OR, left_instance_type, right_instance_type);
   2352     HValue* xored_instance_types = AddUncasted<HBitwise>(
   2353         Token::BIT_XOR, left_instance_type, right_instance_type);
   2354 
   2355     // Check if both strings have the same encoding and both are
   2356     // sequential.
   2357     IfBuilder if_sameencodingandsequential(this);
   2358     if_sameencodingandsequential.If<HCompareNumericAndBranch>(
   2359         AddUncasted<HBitwise>(
   2360             Token::BIT_AND, xored_instance_types,
   2361             Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
   2362         graph()->GetConstant0(), Token::EQ);
   2363     if_sameencodingandsequential.And();
   2364     STATIC_ASSERT(kSeqStringTag == 0);
   2365     if_sameencodingandsequential.If<HCompareNumericAndBranch>(
   2366         AddUncasted<HBitwise>(
   2367             Token::BIT_AND, ored_instance_types,
   2368             Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
   2369         graph()->GetConstant0(), Token::EQ);
   2370     if_sameencodingandsequential.Then();
   2371     {
   2372       HConstant* string_map =
   2373           Add<HConstant>(isolate()->factory()->string_map());
   2374       HConstant* one_byte_string_map =
   2375           Add<HConstant>(isolate()->factory()->one_byte_string_map());
   2376 
   2377       // Determine map and size depending on whether result is one-byte string.
   2378       IfBuilder if_onebyte(this);
   2379       STATIC_ASSERT(kOneByteStringTag != 0);
   2380       if_onebyte.If<HCompareNumericAndBranch>(
   2381           AddUncasted<HBitwise>(
   2382               Token::BIT_AND, ored_instance_types,
   2383               Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
   2384           graph()->GetConstant0(), Token::NE);
   2385       if_onebyte.Then();
   2386       {
   2387         // Allocate sequential one-byte string object.
   2388         Push(length);
   2389         Push(one_byte_string_map);
   2390       }
   2391       if_onebyte.Else();
   2392       {
   2393         // Allocate sequential two-byte string object.
   2394         HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
   2395         size->ClearFlag(HValue::kCanOverflow);
   2396         size->SetFlag(HValue::kUint32);
   2397         Push(size);
   2398         Push(string_map);
   2399       }
   2400       if_onebyte.End();
   2401       HValue* map = Pop();
   2402 
   2403       // Calculate the number of bytes needed for the characters in the
   2404       // string while observing object alignment.
   2405       STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
   2406       HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);
   2407 
   2408       IfBuilder if_size(this);
   2409       if_size.If<HCompareNumericAndBranch>(
   2410           size, Add<HConstant>(Page::kMaxRegularHeapObjectSize), Token::LT);
   2411       if_size.Then();
   2412       {
   2413         // Allocate the string object. HAllocate does not care whether we pass
   2414         // STRING_TYPE or ONE_BYTE_STRING_TYPE here, so we just use STRING_TYPE.
   2415         HAllocate* result =
   2416             BuildAllocate(size, HType::String(), STRING_TYPE, allocation_mode);
   2417         Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
   2418 
   2419         // Initialize the string fields.
   2420         Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
   2421                               Add<HConstant>(String::kEmptyHashField));
   2422         Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
   2423 
   2424         // Copy characters to the result string.
   2425         IfBuilder if_twobyte(this);
   2426         if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
   2427         if_twobyte.Then();
   2428         {
   2429           // Copy characters from the left string.
   2430           BuildCopySeqStringChars(
   2431               left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
   2432               graph()->GetConstant0(), String::TWO_BYTE_ENCODING, left_length);
   2433 
   2434           // Copy characters from the right string.
   2435           BuildCopySeqStringChars(
   2436               right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
   2437               left_length, String::TWO_BYTE_ENCODING, right_length);
   2438         }
   2439         if_twobyte.Else();
   2440         {
   2441           // Copy characters from the left string.
   2442           BuildCopySeqStringChars(
   2443               left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
   2444               graph()->GetConstant0(), String::ONE_BYTE_ENCODING, left_length);
   2445 
   2446           // Copy characters from the right string.
   2447           BuildCopySeqStringChars(
   2448               right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
   2449               left_length, String::ONE_BYTE_ENCODING, right_length);
   2450         }
   2451         if_twobyte.End();
   2452 
   2453         // Count the native string addition.
   2454         AddIncrementCounter(isolate()->counters()->string_add_native());
   2455 
   2456         // Return the sequential string.
   2457         Push(result);
   2458       }
   2459       if_size.Else();
   2460       {
    2461         // Fall back to the runtime to add the two strings. The string has to be
   2462         // allocated in LO space.
   2463         Add<HPushArguments>(left, right);
   2464         Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
   2465       }
   2466       if_size.End();
   2467     }
   2468     if_sameencodingandsequential.Else();
   2469     {
    2470       // Fall back to the runtime to add the two strings.
   2471       Add<HPushArguments>(left, right);
   2472       Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
   2473     }
   2474     if_sameencodingandsequential.End();
   2475   }
   2476   if_createcons.End();
   2477 
   2478   return Pop();
   2479 }
   2480 
   2481 
   2482 HValue* HGraphBuilder::BuildStringAdd(
   2483     HValue* left,
   2484     HValue* right,
   2485     HAllocationMode allocation_mode) {
   2486   NoObservableSideEffectsScope no_effects(this);
   2487 
   2488   // Determine string lengths.
   2489   HValue* left_length = AddLoadStringLength(left);
   2490   HValue* right_length = AddLoadStringLength(right);
   2491 
   2492   // Check if left string is empty.
   2493   IfBuilder if_leftempty(this);
   2494   if_leftempty.If<HCompareNumericAndBranch>(
   2495       left_length, graph()->GetConstant0(), Token::EQ);
   2496   if_leftempty.Then();
   2497   {
   2498     // Count the native string addition.
   2499     AddIncrementCounter(isolate()->counters()->string_add_native());
   2500 
   2501     // Just return the right string.
   2502     Push(right);
   2503   }
   2504   if_leftempty.Else();
   2505   {
   2506     // Check if right string is empty.
   2507     IfBuilder if_rightempty(this);
   2508     if_rightempty.If<HCompareNumericAndBranch>(
   2509         right_length, graph()->GetConstant0(), Token::EQ);
   2510     if_rightempty.Then();
   2511     {
   2512       // Count the native string addition.
   2513       AddIncrementCounter(isolate()->counters()->string_add_native());
   2514 
   2515       // Just return the left string.
   2516       Push(left);
   2517     }
   2518     if_rightempty.Else();
   2519     {
   2520       // Add the two non-empty strings.
   2521       Push(BuildUncheckedStringAdd(left, right, allocation_mode));
   2522     }
   2523     if_rightempty.End();
   2524   }
   2525   if_leftempty.End();
   2526 
   2527   return Pop();
   2528 }
   2529 
   2530 
   2531 HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
   2532     HValue* checked_object,
   2533     HValue* key,
   2534     HValue* val,
   2535     bool is_js_array,
   2536     ElementsKind elements_kind,
   2537     PropertyAccessType access_type,
   2538     LoadKeyedHoleMode load_mode,
   2539     KeyedAccessStoreMode store_mode) {
   2540   DCHECK(top_info()->IsStub() || checked_object->IsCompareMap() ||
   2541          checked_object->IsCheckMaps());
   2542   DCHECK(!IsFixedTypedArrayElementsKind(elements_kind) || !is_js_array);
   2543   // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
   2544   // on a HElementsTransition instruction. The flag can also be removed if the
   2545   // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
    2546   // ElementsKind transitions. Finally, the dependency can be removed for
    2547   // FAST_ELEMENTS stores, since a transition to HOLEY elements won't change the
   2548   // generated store code.
   2549   if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
   2550       (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
   2551     checked_object->ClearDependsOnFlag(kElementsKind);
   2552   }
   2553 
   2554   bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
   2555   bool fast_elements = IsFastObjectElementsKind(elements_kind);
   2556   HValue* elements = AddLoadElements(checked_object);
   2557   if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
   2558       store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
   2559     HCheckMaps* check_cow_map = Add<HCheckMaps>(
   2560         elements, isolate()->factory()->fixed_array_map());
   2561     check_cow_map->ClearDependsOnFlag(kElementsKind);
   2562   }
   2563   HInstruction* length = NULL;
   2564   if (is_js_array) {
   2565     length = Add<HLoadNamedField>(
   2566         checked_object->ActualValue(), checked_object,
   2567         HObjectAccess::ForArrayLength(elements_kind));
   2568   } else {
   2569     length = AddLoadFixedArrayLength(elements);
   2570   }
   2571   length->set_type(HType::Smi());
   2572   HValue* checked_key = NULL;
   2573   if (IsFixedTypedArrayElementsKind(elements_kind)) {
   2574     checked_object = Add<HCheckArrayBufferNotNeutered>(checked_object);
   2575 
   2576     HValue* external_pointer = Add<HLoadNamedField>(
   2577         elements, nullptr,
   2578         HObjectAccess::ForFixedTypedArrayBaseExternalPointer());
   2579     HValue* base_pointer = Add<HLoadNamedField>(
   2580         elements, nullptr, HObjectAccess::ForFixedTypedArrayBaseBasePointer());
   2581     HValue* backing_store = AddUncasted<HAdd>(
   2582         external_pointer, base_pointer, Strength::WEAK, AddOfExternalAndTagged);
   2583 
   2584     if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
   2585       NoObservableSideEffectsScope no_effects(this);
   2586       IfBuilder length_checker(this);
   2587       length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
   2588       length_checker.Then();
   2589       IfBuilder negative_checker(this);
   2590       HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
   2591           key, graph()->GetConstant0(), Token::GTE);
   2592       negative_checker.Then();
   2593       HInstruction* result = AddElementAccess(
   2594           backing_store, key, val, bounds_check, checked_object->ActualValue(),
   2595           elements_kind, access_type);
   2596       negative_checker.ElseDeopt(Deoptimizer::kNegativeKeyEncountered);
   2597       negative_checker.End();
   2598       length_checker.End();
   2599       return result;
   2600     } else {
   2601       DCHECK(store_mode == STANDARD_STORE);
   2602       checked_key = Add<HBoundsCheck>(key, length);
   2603       return AddElementAccess(backing_store, checked_key, val, checked_object,
   2604                               checked_object->ActualValue(), elements_kind,
   2605                               access_type);
   2606     }
   2607   }
   2608   DCHECK(fast_smi_only_elements ||
   2609          fast_elements ||
   2610          IsFastDoubleElementsKind(elements_kind));
   2611 
   2612   // If val is stored into a fast smi array, ensure that the value is a smi
   2613   // before manipulating the backing store. Otherwise the actual store may
   2614   // deopt, leaving the backing store in an invalid state.
   2615   if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
   2616       !val->type().IsSmi()) {
   2617     val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
   2618   }
   2619 
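          // A growing store may need to enlarge the backing store first, which
          // BuildCheckForCapacityGrow handles below. All other stores just
          // bounds-check the key and, for copy-on-write fixed arrays, either
          // copy the elements or verify the map.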
   2620   if (IsGrowStoreMode(store_mode)) {
   2621     NoObservableSideEffectsScope no_effects(this);
   2622     Representation representation = HStoreKeyed::RequiredValueRepresentation(
   2623         elements_kind, STORE_TO_INITIALIZED_ENTRY);
   2624     val = AddUncasted<HForceRepresentation>(val, representation);
   2625     elements = BuildCheckForCapacityGrow(checked_object, elements,
   2626                                          elements_kind, length, key,
   2627                                          is_js_array, access_type);
   2628     checked_key = key;
   2629   } else {
   2630     checked_key = Add<HBoundsCheck>(key, length);
   2631 
   2632     if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
   2633       if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
   2634         NoObservableSideEffectsScope no_effects(this);
   2635         elements = BuildCopyElementsOnWrite(checked_object, elements,
   2636                                             elements_kind, length);
   2637       } else {
   2638         HCheckMaps* check_cow_map = Add<HCheckMaps>(
   2639             elements, isolate()->factory()->fixed_array_map());
   2640         check_cow_map->ClearDependsOnFlag(kElementsKind);
   2641       }
   2642     }
   2643   }
   2644   return AddElementAccess(elements, checked_key, val, checked_object, nullptr,
   2645                           elements_kind, access_type, load_mode);
   2646 }
   2647 
   2648 
   2649 HValue* HGraphBuilder::BuildAllocateArrayFromLength(
   2650     JSArrayBuilder* array_builder,
   2651     HValue* length_argument) {
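          // Two paths: a constant smi length can be allocated directly with a
          // known size, while a dynamic length is bounds-checked against
          // JSArray::kInitialMaxFastElementArray and gets either the default
          // preallocated capacity (for length 0) or capacity == length.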
   2652   if (length_argument->IsConstant() &&
   2653       HConstant::cast(length_argument)->HasSmiValue()) {
   2654     int array_length = HConstant::cast(length_argument)->Integer32Value();
   2655     if (array_length == 0) {
   2656       return array_builder->AllocateEmptyArray();
   2657     } else {
   2658       return array_builder->AllocateArray(length_argument,
   2659                                           array_length,
   2660                                           length_argument);
   2661     }
   2662   }
   2663 
   2664   HValue* constant_zero = graph()->GetConstant0();
   2665   HConstant* max_alloc_length =
   2666       Add<HConstant>(JSArray::kInitialMaxFastElementArray);
   2667   HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
   2668                                                    max_alloc_length);
   2669   IfBuilder if_builder(this);
   2670   if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
   2671                                           Token::EQ);
   2672   if_builder.Then();
   2673   const int initial_capacity = JSArray::kPreallocatedArrayElements;
   2674   HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
   2675   Push(initial_capacity_node);  // capacity
   2676   Push(constant_zero);          // length
   2677   if_builder.Else();
   2678   if (!(top_info()->IsStub()) &&
   2679       IsFastPackedElementsKind(array_builder->kind())) {
   2680     // We'll come back later with better (holey) feedback.
   2681     if_builder.Deopt(
   2682         Deoptimizer::kHoleyArrayDespitePackedElements_kindFeedback);
   2683   } else {
   2684     Push(checked_length);         // capacity
   2685     Push(checked_length);         // length
   2686   }
   2687   if_builder.End();
   2688 
   2689   // Figure out total size
   2690   HValue* length = Pop();
   2691   HValue* capacity = Pop();
   2692   return array_builder->AllocateArray(capacity, max_alloc_length, length);
   2693 }
   2694 
   2695 
   2696 HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind,
   2697                                                   HValue* capacity) {
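          // Computes FixedArray::kHeaderSize + capacity * element_size, where
          // element_size is kDoubleSize for double kinds and kPointerSize
          // otherwise (e.g. a FAST_ELEMENTS store of capacity 4 occupies
          // kHeaderSize + 4 * kPointerSize bytes).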
   2698   int elements_size = IsFastDoubleElementsKind(kind)
   2699       ? kDoubleSize
   2700       : kPointerSize;
   2701 
   2702   HConstant* elements_size_value = Add<HConstant>(elements_size);
   2703   HInstruction* mul =
   2704       HMul::NewImul(isolate(), zone(), context(), capacity->ActualValue(),
   2705                     elements_size_value);
   2706   AddInstruction(mul);
   2707   mul->ClearFlag(HValue::kCanOverflow);
   2708 
   2709   STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
   2710 
   2711   HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
   2712   HValue* total_size = AddUncasted<HAdd>(mul, header_size);
   2713   total_size->ClearFlag(HValue::kCanOverflow);
   2714   return total_size;
   2715 }
   2716 
   2717 
   2718 HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) {
   2719   int base_size = JSArray::kSize;
   2720   if (mode == TRACK_ALLOCATION_SITE) {
   2721     base_size += AllocationMemento::kSize;
   2722   }
   2723   HConstant* size_in_bytes = Add<HConstant>(base_size);
   2724   return Add<HAllocate>(
   2725       size_in_bytes, HType::JSArray(), NOT_TENURED, JS_OBJECT_TYPE);
   2726 }
   2727 
   2728 
   2729 HConstant* HGraphBuilder::EstablishElementsAllocationSize(
   2730     ElementsKind kind,
   2731     int capacity) {
   2732   int base_size = IsFastDoubleElementsKind(kind)
   2733       ? FixedDoubleArray::SizeFor(capacity)
   2734       : FixedArray::SizeFor(capacity);
   2735 
   2736   return Add<HConstant>(base_size);
   2737 }
   2738 
   2739 
   2740 HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
   2741                                                 HValue* size_in_bytes) {
   2742   InstanceType instance_type = IsFastDoubleElementsKind(kind)
   2743       ? FIXED_DOUBLE_ARRAY_TYPE
   2744       : FIXED_ARRAY_TYPE;
   2745 
   2746   return Add<HAllocate>(size_in_bytes, HType::HeapObject(), NOT_TENURED,
   2747                         instance_type);
   2748 }
   2749 
   2750 
   2751 void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
   2752                                                   ElementsKind kind,
   2753                                                   HValue* capacity) {
   2754   Factory* factory = isolate()->factory();
   2755   Handle<Map> map = IsFastDoubleElementsKind(kind)
   2756       ? factory->fixed_double_array_map()
   2757       : factory->fixed_array_map();
   2758 
   2759   Add<HStoreNamedField>(elements, HObjectAccess::ForMap(), Add<HConstant>(map));
   2760   Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
   2761                         capacity);
   2762 }
   2763 
   2764 
   2765 HValue* HGraphBuilder::BuildAllocateAndInitializeArray(ElementsKind kind,
   2766                                                        HValue* capacity) {
   2767   // The HForceRepresentation is to prevent possible deopt on int-smi
   2768   // conversion after allocation but before the new object fields are set.
   2769   capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
   2770   HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity);
   2771   HValue* new_array = BuildAllocateElements(kind, size_in_bytes);
   2772   BuildInitializeElementsHeader(new_array, kind, capacity);
   2773   return new_array;
   2774 }
   2775 
   2776 
   2777 void HGraphBuilder::BuildJSArrayHeader(HValue* array,
   2778                                        HValue* array_map,
   2779                                        HValue* elements,
   2780                                        AllocationSiteMode mode,
   2781                                        ElementsKind elements_kind,
   2782                                        HValue* allocation_site_payload,
   2783                                        HValue* length_field) {
   2784   Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);
   2785 
   2786   HConstant* empty_fixed_array =
   2787     Add<HConstant>(isolate()->factory()->empty_fixed_array());
   2788 
   2789   Add<HStoreNamedField>(
   2790       array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array);
   2791 
   2792   Add<HStoreNamedField>(
   2793       array, HObjectAccess::ForElementsPointer(),
   2794       elements != NULL ? elements : empty_fixed_array);
   2795 
   2796   Add<HStoreNamedField>(
   2797       array, HObjectAccess::ForArrayLength(elements_kind), length_field);
   2798 
   2799   if (mode == TRACK_ALLOCATION_SITE) {
   2800     BuildCreateAllocationMemento(
   2801         array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
   2802   }
   2803 }
   2804 
   2805 
   2806 HInstruction* HGraphBuilder::AddElementAccess(
   2807     HValue* elements, HValue* checked_key, HValue* val, HValue* dependency,
   2808     HValue* backing_store_owner, ElementsKind elements_kind,
   2809     PropertyAccessType access_type, LoadKeyedHoleMode load_mode) {
   2810   if (access_type == STORE) {
   2811     DCHECK(val != NULL);
   2812     if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
   2813       val = Add<HClampToUint8>(val);
   2814     }
   2815     return Add<HStoreKeyed>(elements, checked_key, val, backing_store_owner,
   2816                             elements_kind, STORE_TO_INITIALIZED_ENTRY);
   2817   }
   2818 
   2819   DCHECK(access_type == LOAD);
   2820   DCHECK(val == NULL);
   2821   HLoadKeyed* load =
   2822       Add<HLoadKeyed>(elements, checked_key, dependency, backing_store_owner,
   2823                       elements_kind, load_mode);
   2824   if (elements_kind == UINT32_ELEMENTS) {
   2825     graph()->RecordUint32Instruction(load);
   2826   }
   2827   return load;
   2828 }
   2829 
   2830 
   2831 HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object,
   2832                                            HValue* dependency) {
   2833   return Add<HLoadNamedField>(object, dependency, HObjectAccess::ForMap());
   2834 }
   2835 
   2836 
   2837 HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
   2838                                                 HValue* dependency) {
   2839   return Add<HLoadNamedField>(
   2840       object, dependency, HObjectAccess::ForElementsPointer());
   2841 }
   2842 
   2843 
   2844 HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
   2845     HValue* array,
   2846     HValue* dependency) {
   2847   return Add<HLoadNamedField>(
   2848       array, dependency, HObjectAccess::ForFixedArrayLength());
   2849 }
   2850 
   2851 
   2852 HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
   2853                                                    ElementsKind kind,
   2854                                                    HValue* dependency) {
   2855   return Add<HLoadNamedField>(
   2856       array, dependency, HObjectAccess::ForArrayLength(kind));
   2857 }
   2858 
   2859 
   2860 HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
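          // Growth policy: new_capacity = old_capacity + old_capacity / 2 + 16,
          // e.g. an old capacity of 4 grows to 4 + 2 + 16 = 22 entries.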
   2861   HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
   2862                                                 graph_->GetConstant1());
   2863 
   2864   HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
   2865   new_capacity->ClearFlag(HValue::kCanOverflow);
   2866 
   2867   HValue* min_growth = Add<HConstant>(16);
   2868 
   2869   new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
   2870   new_capacity->ClearFlag(HValue::kCanOverflow);
   2871 
   2872   return new_capacity;
   2873 }
   2874 
   2875 
   2876 HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
   2877                                                  HValue* elements,
   2878                                                  ElementsKind kind,
   2879                                                  ElementsKind new_kind,
   2880                                                  HValue* length,
   2881                                                  HValue* new_capacity) {
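          // The new capacity must keep the backing store within a regular heap
          // page: (kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> shift
          // is the largest element count that still fits, so larger requests
          // fail the bounds check below.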
   2882   Add<HBoundsCheck>(new_capacity, Add<HConstant>(
   2883           (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
   2884           ElementsKindToShiftSize(new_kind)));
   2885 
   2886   HValue* new_elements =
   2887       BuildAllocateAndInitializeArray(new_kind, new_capacity);
   2888 
   2889   BuildCopyElements(elements, kind, new_elements,
   2890                     new_kind, length, new_capacity);
   2891 
   2892   Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
   2893                         new_elements);
   2894 
   2895   return new_elements;
   2896 }
   2897 
   2898 
   2899 void HGraphBuilder::BuildFillElementsWithValue(HValue* elements,
   2900                                                ElementsKind elements_kind,
   2901                                                HValue* from,
   2902                                                HValue* to,
   2903                                                HValue* value) {
   2904   if (to == NULL) {
   2905     to = AddLoadFixedArrayLength(elements);
   2906   }
   2907 
   2908   // Special loop unrolling case
   2909   STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
   2910                 kElementLoopUnrollThreshold);
   2911   int initial_capacity = -1;
   2912   if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
   2913     int constant_from = from->GetInteger32Constant();
   2914     int constant_to = to->GetInteger32Constant();
   2915 
   2916     if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
   2917       initial_capacity = constant_to;
   2918     }
   2919   }
   2920 
   2921   if (initial_capacity >= 0) {
   2922     for (int i = 0; i < initial_capacity; i++) {
   2923       HInstruction* key = Add<HConstant>(i);
   2924       Add<HStoreKeyed>(elements, key, value, nullptr, elements_kind);
   2925     }
   2926   } else {
   2927     // Carefully loop backwards so that "from" remains live through the loop
   2928     // rather than "to". This often corresponds to keeping length live rather
   2929     // than capacity, which helps register allocation, since length is used
   2930     // more often than capacity after filling with holes.
   2931     LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
   2932 
   2933     HValue* key = builder.BeginBody(to, from, Token::GT);
   2934 
   2935     HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
   2936     adjusted_key->ClearFlag(HValue::kCanOverflow);
   2937 
   2938     Add<HStoreKeyed>(elements, adjusted_key, value, nullptr, elements_kind);
   2939 
   2940     builder.EndBody();
   2941   }
   2942 }
   2943 
   2944 
   2945 void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
   2946                                               ElementsKind elements_kind,
   2947                                               HValue* from,
   2948                                               HValue* to) {
   2949   // Fast elements kinds need to be initialized in case the statements below
   2950   // cause a garbage collection.
   2951 
   2952   HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
   2953                      ? graph()->GetConstantHole()
   2954                      : Add<HConstant>(HConstant::kHoleNaN);
   2955 
   2956   // Since we're about to store a hole value, the store instruction below must
   2957   // assume an elements kind that supports heap object values.
   2958   if (IsFastSmiOrObjectElementsKind(elements_kind)) {
   2959     elements_kind = FAST_HOLEY_ELEMENTS;
   2960   }
   2961 
   2962   BuildFillElementsWithValue(elements, elements_kind, from, to, hole);
   2963 }
   2964 
   2965 
   2966 void HGraphBuilder::BuildCopyProperties(HValue* from_properties,
   2967                                         HValue* to_properties, HValue* length,
   2968                                         HValue* capacity) {
   2969   ElementsKind kind = FAST_ELEMENTS;
   2970 
   2971   BuildFillElementsWithValue(to_properties, kind, length, capacity,
   2972                              graph()->GetConstantUndefined());
   2973 
   2974   LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
   2975 
   2976   HValue* key = builder.BeginBody(length, graph()->GetConstant0(), Token::GT);
   2977 
   2978   key = AddUncasted<HSub>(key, graph()->GetConstant1());
   2979   key->ClearFlag(HValue::kCanOverflow);
   2980 
   2981   HValue* element =
   2982       Add<HLoadKeyed>(from_properties, key, nullptr, nullptr, kind);
   2983 
   2984   Add<HStoreKeyed>(to_properties, key, element, nullptr, kind);
   2985 
   2986   builder.EndBody();
   2987 }
   2988 
   2989 
   2990 void HGraphBuilder::BuildCopyElements(HValue* from_elements,
   2991                                       ElementsKind from_elements_kind,
   2992                                       HValue* to_elements,
   2993                                       ElementsKind to_elements_kind,
   2994                                       HValue* length,
   2995                                       HValue* capacity) {
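          // Small constant capacities are copied with an unrolled sequence of
          // keyed loads and stores. Otherwise a backwards loop is used; when a
          // double-to-object copy can allocate (and thus trigger GC) while
          // boxing values, the target is pre-filled with holes first so it is
          // always in a consistent state.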
   2996   int constant_capacity = -1;
   2997   if (capacity != NULL &&
   2998       capacity->IsConstant() &&
   2999       HConstant::cast(capacity)->HasInteger32Value()) {
   3000     int constant_candidate = HConstant::cast(capacity)->Integer32Value();
   3001     if (constant_candidate <= kElementLoopUnrollThreshold) {
   3002       constant_capacity = constant_candidate;
   3003     }
   3004   }
   3005 
   3006   bool pre_fill_with_holes =
   3007     IsFastDoubleElementsKind(from_elements_kind) &&
   3008     IsFastObjectElementsKind(to_elements_kind);
   3009   if (pre_fill_with_holes) {
   3010     // If the copy might trigger a GC, make sure that the FixedArray is
   3011     // pre-initialized with holes to make sure that it's always in a
   3012     // consistent state.
   3013     BuildFillElementsWithHole(to_elements, to_elements_kind,
   3014                               graph()->GetConstant0(), NULL);
   3015   }
   3016 
   3017   if (constant_capacity != -1) {
   3018     // Unroll the loop for small elements kinds.
   3019     for (int i = 0; i < constant_capacity; i++) {
   3020       HValue* key_constant = Add<HConstant>(i);
   3021       HInstruction* value = Add<HLoadKeyed>(
   3022           from_elements, key_constant, nullptr, nullptr, from_elements_kind);
   3023       Add<HStoreKeyed>(to_elements, key_constant, value, nullptr,
   3024                        to_elements_kind);
   3025     }
   3026   } else {
   3027     if (!pre_fill_with_holes &&
   3028         (capacity == NULL || !length->Equals(capacity))) {
   3029       BuildFillElementsWithHole(to_elements, to_elements_kind,
   3030                                 length, NULL);
   3031     }
   3032 
   3033     LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
   3034 
   3035     HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
   3036                                     Token::GT);
   3037 
   3038     key = AddUncasted<HSub>(key, graph()->GetConstant1());
   3039     key->ClearFlag(HValue::kCanOverflow);
   3040 
   3041     HValue* element = Add<HLoadKeyed>(from_elements, key, nullptr, nullptr,
   3042                                       from_elements_kind, ALLOW_RETURN_HOLE);
   3043 
   3044     ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
   3045                          IsFastSmiElementsKind(to_elements_kind))
   3046       ? FAST_HOLEY_ELEMENTS : to_elements_kind;
   3047 
   3048     if (IsHoleyElementsKind(from_elements_kind) &&
   3049         from_elements_kind != to_elements_kind) {
   3050       IfBuilder if_hole(this);
   3051       if_hole.If<HCompareHoleAndBranch>(element);
   3052       if_hole.Then();
   3053       HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
   3054                                      ? Add<HConstant>(HConstant::kHoleNaN)
   3055                                      : graph()->GetConstantHole();
   3056       Add<HStoreKeyed>(to_elements, key, hole_constant, nullptr, kind);
   3057       if_hole.Else();
   3058       HStoreKeyed* store =
   3059           Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
   3060       store->SetFlag(HValue::kAllowUndefinedAsNaN);
   3061       if_hole.End();
   3062     } else {
   3063       HStoreKeyed* store =
   3064           Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
   3065       store->SetFlag(HValue::kAllowUndefinedAsNaN);
   3066     }
   3067 
   3068     builder.EndBody();
   3069   }
   3070 
   3071   Counters* counters = isolate()->counters();
   3072   AddIncrementCounter(counters->inlined_copied_elements());
   3073 }
   3074 
   3075 
   3076 HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
   3077                                                  HValue* allocation_site,
   3078                                                  AllocationSiteMode mode,
   3079                                                  ElementsKind kind) {
   3080   HAllocate* array = AllocateJSArrayObject(mode);
   3081 
   3082   HValue* map = AddLoadMap(boilerplate);
   3083   HValue* elements = AddLoadElements(boilerplate);
   3084   HValue* length = AddLoadArrayLength(boilerplate, kind);
   3085 
   3086   BuildJSArrayHeader(array,
   3087                      map,
   3088                      elements,
   3089                      mode,
   3090                      FAST_ELEMENTS,
   3091                      allocation_site,
   3092                      length);
   3093   return array;
   3094 }
   3095 
   3096 
   3097 HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
   3098                                                    HValue* allocation_site,
   3099                                                    AllocationSiteMode mode) {
   3100   HAllocate* array = AllocateJSArrayObject(mode);
   3101 
   3102   HValue* map = AddLoadMap(boilerplate);
   3103 
   3104   BuildJSArrayHeader(array,
   3105                      map,
   3106                      NULL,  // set elements to empty fixed array
   3107                      mode,
   3108                      FAST_ELEMENTS,
   3109                      allocation_site,
   3110                      graph()->GetConstant0());
   3111   return array;
   3112 }
   3113 
   3114 
   3115 HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
   3116                                                       HValue* allocation_site,
   3117                                                       AllocationSiteMode mode,
   3118                                                       ElementsKind kind) {
   3119   HValue* boilerplate_elements = AddLoadElements(boilerplate);
   3120   HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);
   3121 
   3122   // Generate size calculation code here in order to make it dominate
   3123   // the JSArray allocation.
   3124   HValue* elements_size = BuildCalculateElementsSize(kind, capacity);
   3125 
   3126   // Create an empty JSArray object for now; store elimination should remove
   3127   // the redundant initialization of the elements and length fields, and at
   3128   // the same time the object will be fully prepared for GC if one happens
   3129   // during elements allocation.
   3130   HValue* result = BuildCloneShallowArrayEmpty(
   3131       boilerplate, allocation_site, mode);
   3132 
   3133   HAllocate* elements = BuildAllocateElements(kind, elements_size);
   3134 
   3135   // This function implicitly relies on the fact that the
   3136   // FastCloneShallowArrayStub is called only for literals shorter than
   3137   // JSArray::kInitialMaxFastElementArray.
   3138   // Can't add an HBoundsCheck here; otherwise the stub needs an eager frame.
   3139   HConstant* size_upper_bound = EstablishElementsAllocationSize(
   3140       kind, JSArray::kInitialMaxFastElementArray);
   3141   elements->set_size_upper_bound(size_upper_bound);
   3142 
   3143   Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(), elements);
   3144 
   3145   // The allocation for the cloned array above causes register pressure on
   3146   // machines with low register counts. Force a reload of the boilerplate
   3147   // elements here to free up a register for the allocation to avoid unnecessary
   3148   // spillage.
   3149   boilerplate_elements = AddLoadElements(boilerplate);
   3150   boilerplate_elements->SetFlag(HValue::kCantBeReplaced);
   3151 
   3152   // Copy the elements array header.
   3153   for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
   3154     HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
   3155     Add<HStoreNamedField>(
   3156         elements, access,
   3157         Add<HLoadNamedField>(boilerplate_elements, nullptr, access));
   3158   }
   3159 
   3160   // And copy the array length into the result.
   3161   HValue* length = AddLoadArrayLength(boilerplate, kind);
   3162   Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind), length);
   3163 
   3164   BuildCopyElements(boilerplate_elements, kind, elements,
   3165                     kind, length, NULL);
   3166   return result;
   3167 }
   3168 
   3169 
   3170 void HGraphBuilder::BuildCompareNil(HValue* value, Type* type,
   3171                                     HIfContinuation* continuation,
   3172                                     MapEmbedding map_embedding) {
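          // Chain together (via Or) the checks the feedback type allows: null,
          // undefined and undetectable. If the type also admits other values,
          // the else branch either checks the single expected map or deopts.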
   3173   IfBuilder if_nil(this);
   3174   bool some_case_handled = false;
   3175   bool some_case_missing = false;
   3176 
   3177   if (type->Maybe(Type::Null())) {
   3178     if (some_case_handled) if_nil.Or();
   3179     if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
   3180     some_case_handled = true;
   3181   } else {
   3182     some_case_missing = true;
   3183   }
   3184 
   3185   if (type->Maybe(Type::Undefined())) {
   3186     if (some_case_handled) if_nil.Or();
   3187     if_nil.If<HCompareObjectEqAndBranch>(value,
   3188                                          graph()->GetConstantUndefined());
   3189     some_case_handled = true;
   3190   } else {
   3191     some_case_missing = true;
   3192   }
   3193 
   3194   if (type->Maybe(Type::Undetectable())) {
   3195     if (some_case_handled) if_nil.Or();
   3196     if_nil.If<HIsUndetectableAndBranch>(value);
   3197     some_case_handled = true;
   3198   } else {
   3199     some_case_missing = true;
   3200   }
   3201 
   3202   if (some_case_missing) {
   3203     if_nil.Then();
   3204     if_nil.Else();
   3205     if (type->NumClasses() == 1) {
   3206       BuildCheckHeapObject(value);
   3207       // For ICs, the map checked below is a sentinel map that gets replaced by
   3208       // the monomorphic map when the code is used as a template to generate a
   3209       // new IC. For optimized functions, there is no sentinel map, the map
   3210       // emitted below is the actual monomorphic map.
   3211       if (map_embedding == kEmbedMapsViaWeakCells) {
   3212         HValue* cell =
   3213             Add<HConstant>(Map::WeakCellForMap(type->Classes().Current()));
   3214         HValue* expected_map = Add<HLoadNamedField>(
   3215             cell, nullptr, HObjectAccess::ForWeakCellValue());
   3216         HValue* map =
   3217             Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
   3218         IfBuilder map_check(this);
   3219         map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
   3220         map_check.ThenDeopt(Deoptimizer::kUnknownMap);
   3221         map_check.End();
   3222       } else {
   3223         DCHECK(map_embedding == kEmbedMapsDirectly);
   3224         Add<HCheckMaps>(value, type->Classes().Current());
   3225       }
   3226     } else {
   3227       if_nil.Deopt(Deoptimizer::kTooManyUndetectableTypes);
   3228     }
   3229   }
   3230 
   3231   if_nil.CaptureContinuation(continuation);
   3232 }
   3233 
   3234 
   3235 void HGraphBuilder::BuildCreateAllocationMemento(
   3236     HValue* previous_object,
   3237     HValue* previous_object_size,
   3238     HValue* allocation_site) {
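          // The memento is laid out directly behind the previously allocated
          // object and gets a map plus a pointer back to the allocation site;
          // with pretenuring enabled the site's creation counter is incremented.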
   3239   DCHECK(allocation_site != NULL);
   3240   HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
   3241       previous_object, previous_object_size, HType::HeapObject());
   3242   AddStoreMapConstant(
   3243       allocation_memento, isolate()->factory()->allocation_memento_map());
   3244   Add<HStoreNamedField>(
   3245       allocation_memento,
   3246       HObjectAccess::ForAllocationMementoSite(),
   3247       allocation_site);
   3248   if (FLAG_allocation_site_pretenuring) {
   3249     HValue* memento_create_count =
   3250         Add<HLoadNamedField>(allocation_site, nullptr,
   3251                              HObjectAccess::ForAllocationSiteOffset(
   3252                                  AllocationSite::kPretenureCreateCountOffset));
   3253     memento_create_count = AddUncasted<HAdd>(
   3254         memento_create_count, graph()->GetConstant1());
   3255     // This smi value is reset to zero after every GC; overflow isn't a
   3256     // problem since the counter is bounded by the new-space size.
   3257     memento_create_count->ClearFlag(HValue::kCanOverflow);
   3258     Add<HStoreNamedField>(
   3259         allocation_site, HObjectAccess::ForAllocationSiteOffset(
   3260             AllocationSite::kPretenureCreateCountOffset), memento_create_count);
   3261   }
   3262 }
   3263 
   3264 
   3265 HInstruction* HGraphBuilder::BuildGetNativeContext() {
   3266   return Add<HLoadNamedField>(
   3267       context(), nullptr,
   3268       HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
   3269 }
   3270 
   3271 
   3272 HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
   3273   // Get the function's context, then the native context from it.
   3274   HInstruction* context = Add<HLoadNamedField>(
   3275       closure, nullptr, HObjectAccess::ForFunctionContextPointer());
   3276   return Add<HLoadNamedField>(
   3277       context, nullptr,
   3278       HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
   3279 }
   3280 
   3281 
   3282 HInstruction* HGraphBuilder::BuildGetScriptContext(int context_index) {
   3283   HValue* native_context = BuildGetNativeContext();
   3284   HValue* script_context_table = Add<HLoadNamedField>(
   3285       native_context, nullptr,
   3286       HObjectAccess::ForContextSlot(Context::SCRIPT_CONTEXT_TABLE_INDEX));
   3287   return Add<HLoadNamedField>(script_context_table, nullptr,
   3288                               HObjectAccess::ForScriptContext(context_index));
   3289 }
   3290 
   3291 
   3292 HValue* HGraphBuilder::BuildGetParentContext(HValue* depth, int depth_value) {
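          // Walk the context chain via Context::PREVIOUS_INDEX, either a
          // dynamic number of times (loop on |depth|) or a statically known
          // number of times (unrolled for |depth_value|).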
   3293   HValue* script_context = context();
   3294   if (depth != NULL) {
   3295     HValue* zero = graph()->GetConstant0();
   3296 
   3297     Push(script_context);
   3298     Push(depth);
   3299 
   3300     LoopBuilder loop(this);
   3301     loop.BeginBody(2);  // Drop script_context and depth from last environment
   3302                         // to appease live range building without simulates.
   3303     depth = Pop();
   3304     script_context = Pop();
   3305 
   3306     script_context = Add<HLoadNamedField>(
   3307         script_context, nullptr,
   3308         HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
   3309     depth = AddUncasted<HSub>(depth, graph()->GetConstant1());
   3310     depth->ClearFlag(HValue::kCanOverflow);
   3311 
   3312     IfBuilder if_break(this);
   3313     if_break.If<HCompareNumericAndBranch, HValue*>(depth, zero, Token::EQ);
   3314     if_break.Then();
   3315     {
   3316       Push(script_context);  // The result.
   3317       loop.Break();
   3318     }
   3319     if_break.Else();
   3320     {
   3321       Push(script_context);
   3322       Push(depth);
   3323     }
   3324     loop.EndBody();
   3325     if_break.End();
   3326 
   3327     script_context = Pop();
   3328   } else if (depth_value > 0) {
   3329     // Unroll the above loop.
   3330     for (int i = 0; i < depth_value; i++) {
   3331       script_context = Add<HLoadNamedField>(
   3332           script_context, nullptr,
   3333           HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
   3334     }
   3335   }
   3336   return script_context;
   3337 }
   3338 
   3339 
   3340 HInstruction* HGraphBuilder::BuildGetArrayFunction() {
   3341   HInstruction* native_context = BuildGetNativeContext();
   3342   HInstruction* index =
   3343       Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
   3344   return Add<HLoadKeyed>(native_context, index, nullptr, nullptr,
   3345                          FAST_ELEMENTS);
   3346 }
   3347 
   3348 
   3349 HValue* HGraphBuilder::BuildArrayBufferViewFieldAccessor(HValue* object,
   3350                                                          HValue* checked_object,
   3351                                                          FieldIndex index) {
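          // Load the requested field, but return 0 instead when the view's
          // underlying JSArrayBuffer has been neutered (WasNeutered bit set).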
   3352   NoObservableSideEffectsScope scope(this);
   3353   HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
   3354       index.offset(), Representation::Tagged());
   3355   HInstruction* buffer = Add<HLoadNamedField>(
   3356       object, checked_object, HObjectAccess::ForJSArrayBufferViewBuffer());
   3357   HInstruction* field = Add<HLoadNamedField>(object, checked_object, access);
   3358 
   3359   HInstruction* flags = Add<HLoadNamedField>(
   3360       buffer, nullptr, HObjectAccess::ForJSArrayBufferBitField());
   3361   HValue* was_neutered_mask =
   3362       Add<HConstant>(1 << JSArrayBuffer::WasNeutered::kShift);
   3363   HValue* was_neutered_test =
   3364       AddUncasted<HBitwise>(Token::BIT_AND, flags, was_neutered_mask);
   3365 
   3366   IfBuilder if_was_neutered(this);
   3367   if_was_neutered.If<HCompareNumericAndBranch>(
   3368       was_neutered_test, graph()->GetConstant0(), Token::NE);
   3369   if_was_neutered.Then();
   3370   Push(graph()->GetConstant0());
   3371   if_was_neutered.Else();
   3372   Push(field);
   3373   if_was_neutered.End();
   3374 
   3375   return Pop();
   3376 }
   3377 
   3378 
   3379 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
   3380     ElementsKind kind,
   3381     HValue* allocation_site_payload,
   3382     HValue* constructor_function,
   3383     AllocationSiteOverrideMode override_mode) :
   3384         builder_(builder),
   3385         kind_(kind),
   3386         allocation_site_payload_(allocation_site_payload),
   3387         constructor_function_(constructor_function) {
   3388   DCHECK(!allocation_site_payload->IsConstant() ||
   3389          HConstant::cast(allocation_site_payload)->handle(
   3390              builder_->isolate())->IsAllocationSite());
   3391   mode_ = override_mode == DISABLE_ALLOCATION_SITES
   3392       ? DONT_TRACK_ALLOCATION_SITE
   3393       : AllocationSite::GetMode(kind);
   3394 }
   3395 
   3396 
   3397 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
   3398                                               ElementsKind kind,
   3399                                               HValue* constructor_function) :
   3400     builder_(builder),
   3401     kind_(kind),
   3402     mode_(DONT_TRACK_ALLOCATION_SITE),
   3403     allocation_site_payload_(NULL),
   3404     constructor_function_(constructor_function) {
   3405 }
   3406 
   3407 
   3408 HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
   3409   if (!builder()->top_info()->IsStub()) {
   3410     // A constant map is fine.
   3411     Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
   3412                     builder()->isolate());
   3413     return builder()->Add<HConstant>(map);
   3414   }
   3415 
   3416   if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
   3417     // No need for a context lookup if the kind_ matches the initial
   3418     // map, because we can just load the map in that case.
   3419     HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
   3420     return builder()->Add<HLoadNamedField>(constructor_function_, nullptr,
   3421                                            access);
   3422   }
   3423 
   3424   // TODO(mvstanton): we should always have a constructor function if we
   3425   // are creating a stub.
   3426   HInstruction* native_context = constructor_function_ != NULL
   3427       ? builder()->BuildGetNativeContext(constructor_function_)
   3428       : builder()->BuildGetNativeContext();
   3429 
   3430   HObjectAccess access =
   3431       HObjectAccess::ForContextSlot(Context::ArrayMapIndex(kind_));
   3432   return builder()->Add<HLoadNamedField>(native_context, nullptr, access);
   3433 }
   3434 
   3435 
   3436 HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
   3437   // Load the initial map from the constructor function.
   3438   HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
   3439   return builder()->Add<HLoadNamedField>(constructor_function_, nullptr,
   3440                                          access);
   3441 }
   3442 
   3443 
   3444 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
   3445   HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
   3446   return AllocateArray(capacity,
   3447                        capacity,
   3448                        builder()->graph()->GetConstant0());
   3449 }
   3450 
   3451 
   3452 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
   3453     HValue* capacity,
   3454     HConstant* capacity_upper_bound,
   3455     HValue* length_field,
   3456     FillMode fill_mode) {
   3457   return AllocateArray(capacity,
   3458                        capacity_upper_bound->GetInteger32Constant(),
   3459                        length_field,
   3460                        fill_mode);
   3461 }
   3462 
   3463 
   3464 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
   3465     HValue* capacity,
   3466     int capacity_upper_bound,
   3467     HValue* length_field,
   3468     FillMode fill_mode) {
   3469   HConstant* elements_size_upper_bound = capacity->IsInteger32Constant()
   3470       ? HConstant::cast(capacity)
   3471       : builder()->EstablishElementsAllocationSize(kind_, capacity_upper_bound);
   3472 
   3473   HAllocate* array = AllocateArray(capacity, length_field, fill_mode);
   3474   if (!elements_location_->has_size_upper_bound()) {
   3475     elements_location_->set_size_upper_bound(elements_size_upper_bound);
   3476   }
   3477   return array;
   3478 }
   3479 
   3480 
   3481 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
   3482     HValue* capacity,
   3483     HValue* length_field,
   3484     FillMode fill_mode) {
   3485   // These HForceRepresentations are because we store these as fields in the
   3486   // objects we construct, and an int32-to-smi HChange could deopt. Accept
   3487   // the deopt possibility now, before allocation occurs.
   3488   capacity =
   3489       builder()->AddUncasted<HForceRepresentation>(capacity,
   3490                                                    Representation::Smi());
   3491   length_field =
   3492       builder()->AddUncasted<HForceRepresentation>(length_field,
   3493                                                    Representation::Smi());
   3494 
   3495   // Generate size calculation code here in order to make it dominate
   3496   // the JSArray allocation.
   3497   HValue* elements_size =
   3498       builder()->BuildCalculateElementsSize(kind_, capacity);
   3499 
   3500   // Bail out for large objects.
   3501   HValue* max_regular_heap_object_size =
   3502       builder()->Add<HConstant>(Page::kMaxRegularHeapObjectSize);
   3503   builder()->Add<HBoundsCheck>(elements_size, max_regular_heap_object_size);
   3504 
   3505   // Allocate (dealing with failure appropriately)
   3506   HAllocate* array_object = builder()->AllocateJSArrayObject(mode_);
   3507 
   3508   // Fill in the fields: map, properties, length
   3509   HValue* map;
   3510   if (allocation_site_payload_ == NULL) {
   3511     map = EmitInternalMapCode();
   3512   } else {
   3513     map = EmitMapCode();
   3514   }
   3515 
   3516   builder()->BuildJSArrayHeader(array_object,
   3517                                 map,
   3518                                 NULL,  // set elements to empty fixed array
   3519                                 mode_,
   3520                                 kind_,
   3521                                 allocation_site_payload_,
   3522                                 length_field);
   3523 
   3524   // Allocate and initialize the elements
   3525   elements_location_ = builder()->BuildAllocateElements(kind_, elements_size);
   3526 
   3527   builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);
   3528 
   3529   // Set the elements
   3530   builder()->Add<HStoreNamedField>(
   3531       array_object, HObjectAccess::ForElementsPointer(), elements_location_);
   3532 
   3533   if (fill_mode == FILL_WITH_HOLE) {
   3534     builder()->BuildFillElementsWithHole(elements_location_, kind_,
   3535                                          graph()->GetConstant0(), capacity);
   3536   }
   3537 
   3538   return array_object;
   3539 }
   3540 
   3541 
   3542 HValue* HGraphBuilder::AddLoadJSBuiltin(int context_index) {
   3543   HValue* native_context = BuildGetNativeContext();
   3544   HObjectAccess function_access = HObjectAccess::ForContextSlot(context_index);
   3545   return Add<HLoadNamedField>(native_context, nullptr, function_access);
   3546 }
   3547 
   3548 
   3549 HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
   3550     : HGraphBuilder(info),
   3551       function_state_(NULL),
   3552       initial_function_state_(this, info, NORMAL_RETURN, 0),
   3553       ast_context_(NULL),
   3554       break_scope_(NULL),
   3555       inlined_count_(0),
   3556       globals_(10, info->zone()),
   3557       osr_(new(info->zone()) HOsrBuilder(this)) {
   3558   // This is not initialized in the initializer list because the
   3559   // constructor for the initial state relies on function_state_ == NULL
   3560   // to know it's the initial state.
   3561   function_state_ = &initial_function_state_;
   3562   InitializeAstVisitor(info->isolate());
   3563   if (top_info()->is_tracking_positions()) {
   3564     SetSourcePosition(info->shared_info()->start_position());
   3565   }
   3566 }
   3567 
   3568 
   3569 HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
   3570                                                 HBasicBlock* second,
   3571                                                 BailoutId join_id) {
   3572   if (first == NULL) {
   3573     return second;
   3574   } else if (second == NULL) {
   3575     return first;
   3576   } else {
   3577     HBasicBlock* join_block = graph()->CreateBasicBlock();
   3578     Goto(first, join_block);
   3579     Goto(second, join_block);
   3580     join_block->SetJoinId(join_id);
   3581     return join_block;
   3582   }
   3583 }
   3584 
   3585 
   3586 HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
   3587                                                   HBasicBlock* exit_block,
   3588                                                   HBasicBlock* continue_block) {
   3589   if (continue_block != NULL) {
   3590     if (exit_block != NULL) Goto(exit_block, continue_block);
   3591     continue_block->SetJoinId(statement->ContinueId());
   3592     return continue_block;
   3593   }
   3594   return exit_block;
   3595 }
   3596 
   3597 
   3598 HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
   3599                                                 HBasicBlock* loop_entry,
   3600                                                 HBasicBlock* body_exit,
   3601                                                 HBasicBlock* loop_successor,
   3602                                                 HBasicBlock* break_block) {
   3603   if (body_exit != NULL) Goto(body_exit, loop_entry);
   3604   loop_entry->PostProcessLoopHeader(statement);
   3605   if (break_block != NULL) {
   3606     if (loop_successor != NULL) Goto(loop_successor, break_block);
   3607     break_block->SetJoinId(statement->ExitId());
   3608     return break_block;
   3609   }
   3610   return loop_successor;
   3611 }
   3612 
   3613 
   3614 // Build a new loop header block and set it as the current block.
   3615 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
   3616   HBasicBlock* loop_entry = CreateLoopHeaderBlock();
   3617   Goto(loop_entry);
   3618   set_current_block(loop_entry);
   3619   return loop_entry;
   3620 }
   3621 
   3622 
   3623 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
   3624     IterationStatement* statement) {
   3625   HBasicBlock* loop_entry = osr()->HasOsrEntryAt(statement)
   3626       ? osr()->BuildOsrLoopEntry(statement)
   3627       : BuildLoopEntry();
   3628   return loop_entry;
   3629 }
   3630 
   3631 
   3632 void HBasicBlock::FinishExit(HControlInstruction* instruction,
   3633                              SourcePosition position) {
   3634   Finish(instruction, position);
   3635   ClearEnvironment();
   3636 }
   3637 
   3638 
   3639 std::ostream& operator<<(std::ostream& os, const HBasicBlock& b) {
   3640   return os << "B" << b.block_id();
   3641 }
   3642 
   3643 
   3644 HGraph::HGraph(CompilationInfo* info)
   3645     : isolate_(info->isolate()),
   3646       next_block_id_(0),
   3647       entry_block_(NULL),
   3648       blocks_(8, info->zone()),
   3649       values_(16, info->zone()),
   3650       phi_list_(NULL),
   3651       uint32_instructions_(NULL),
   3652       osr_(NULL),
   3653       info_(info),
   3654       zone_(info->zone()),
   3655       is_recursive_(false),
   3656       use_optimistic_licm_(false),
   3657       depends_on_empty_array_proto_elements_(false),
   3658       type_change_checksum_(0),
   3659       maximum_environment_size_(0),
   3660       no_side_effects_scope_count_(0),
   3661       disallow_adding_new_values_(false) {
   3662   if (info->IsStub()) {
   3663     CallInterfaceDescriptor descriptor =
   3664         info->code_stub()->GetCallInterfaceDescriptor();
   3665     start_environment_ =
   3666         new (zone_) HEnvironment(zone_, descriptor.GetRegisterParameterCount());
   3667   } else {
   3668     if (info->is_tracking_positions()) {
   3669       info->TraceInlinedFunction(info->shared_info(), SourcePosition::Unknown(),
   3670                                  InlinedFunctionInfo::kNoParentId);
   3671     }
   3672     start_environment_ =
   3673         new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
   3674   }
   3675   start_environment_->set_ast_id(BailoutId::FunctionContext());
   3676   entry_block_ = CreateBasicBlock();
   3677   entry_block_->SetInitialEnvironment(start_environment_);
   3678 }
   3679 
   3680 
   3681 HBasicBlock* HGraph::CreateBasicBlock() {
   3682   HBasicBlock* result = new(zone()) HBasicBlock(this);
   3683   blocks_.Add(result, zone());
   3684   return result;
   3685 }
   3686 
   3687 
   3688 void HGraph::FinalizeUniqueness() {
   3689   DisallowHeapAllocation no_gc;
   3690   for (int i = 0; i < blocks()->length(); ++i) {
   3691     for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
   3692       it.Current()->FinalizeUniqueness();
   3693     }
   3694   }
   3695 }
   3696 
   3697 
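        // Translates a hydrogen SourcePosition back to an absolute script
        // position when position tracking is enabled; otherwise the raw value
        // is returned unchanged.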
   3698 int HGraph::SourcePositionToScriptPosition(SourcePosition pos) {
   3699   return (FLAG_hydrogen_track_positions && !pos.IsUnknown())
   3700              ? info()->start_position_for(pos.inlining_id()) + pos.position()
   3701              : pos.raw();
   3702 }
   3703 
   3704 
   3705 // Block ordering was implemented with two mutually recursive methods,
   3706 // HGraph::Postorder and HGraph::PostorderLoopBlocks.
   3707 // The recursion could lead to stack overflow so the algorithm has been
   3708 // implemented iteratively.
   3709 // At a high level the algorithm looks like this:
   3710 //
   3711 // Postorder(block, loop_header) : {
   3712 //   if (block has already been visited or is of another loop) return;
   3713 //   mark block as visited;
   3714 //   if (block is a loop header) {
   3715 //     VisitLoopMembers(block, loop_header);
   3716 //     VisitSuccessorsOfLoopHeader(block);
   3717 //   } else {
   3718 //     VisitSuccessors(block)
   3719 //   }
   3720 //   put block in result list;
   3721 // }
   3722 //
   3723 // VisitLoopMembers(block, outer_loop_header) {
   3724 //   foreach (block b in block loop members) {
   3725 //     VisitSuccessorsOfLoopMember(b, outer_loop_header);
   3726 //     if (b is loop header) VisitLoopMembers(b);
   3727 //   }
   3728 // }
   3729 //
   3730 // VisitSuccessorsOfLoopMember(block, outer_loop_header) {
   3731 //   foreach (block b in block successors) Postorder(b, outer_loop_header)
   3732 // }
   3733 //
   3734 // VisitSuccessorsOfLoopHeader(block) {
   3735 //   foreach (block b in block successors) Postorder(b, block)
   3736 // }
   3737 //
   3738 // VisitSuccessors(block, loop_header) {
   3739 //   foreach (block b in block successors) Postorder(b, loop_header)
   3740 // }
   3741 //
   3742 // The ordering is started calling Postorder(entry, NULL).
   3743 //
   3744 // Each instance of PostorderProcessor represents the "stack frame" of the
   3745 // recursion, and particularly keeps the state of the loop (iteration) of the
   3746 // "Visit..." function it represents.
   3747 // To recycle memory we keep all the frames in a doubly-linked list, but
   3748 // this means that we cannot use constructors to initialize the frames.
   3749 //
   3750 class PostorderProcessor : public ZoneObject {
   3751  public:
   3752   // Back link (towards the stack bottom).
   3753   PostorderProcessor* parent() { return father_; }
   3754   // Forward link (towards the stack top).
   3755   PostorderProcessor* child() { return child_; }
   3756   HBasicBlock* block() { return block_; }
   3757   HLoopInformation* loop() { return loop_; }
   3758   HBasicBlock* loop_header() { return loop_header_; }
   3759 
   3760   static PostorderProcessor* CreateEntryProcessor(Zone* zone,
   3761                                                   HBasicBlock* block) {
   3762     PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
   3763     return result->SetupSuccessors(zone, block, NULL);
   3764   }
   3765 
   3766   PostorderProcessor* PerformStep(Zone* zone,
   3767                                   ZoneList<HBasicBlock*>* order) {
   3768     PostorderProcessor* next =
   3769         PerformNonBacktrackingStep(zone, order);
   3770     if (next != NULL) {
   3771       return next;
   3772     } else {
   3773       return Backtrack(zone, order);
   3774     }
   3775   }
   3776 
   3777  private:
   3778   explicit PostorderProcessor(PostorderProcessor* father)
   3779       : father_(father), child_(NULL), successor_iterator(NULL) { }
   3780 
   3781   // Each enum value states the cycle whose state is kept by this instance.
   3782   enum LoopKind {
   3783     NONE,
   3784     SUCCESSORS,
   3785     SUCCESSORS_OF_LOOP_HEADER,
   3786     LOOP_MEMBERS,
   3787     SUCCESSORS_OF_LOOP_MEMBER
   3788   };
   3789 
   3790   // Each "Setup..." method is like a constructor for a cycle state.
   3791   PostorderProcessor* SetupSuccessors(Zone* zone,
   3792                                       HBasicBlock* block,
   3793                                       HBasicBlock* loop_header) {
   3794     if (block == NULL || block->IsOrdered() ||
   3795         block->parent_loop_header() != loop_header) {
   3796       kind_ = NONE;
   3797       block_ = NULL;
   3798       loop_ = NULL;
   3799       loop_header_ = NULL;
   3800       return this;
   3801     } else {
   3802       block_ = block;
   3803       loop_ = NULL;
   3804       block->MarkAsOrdered();
   3805 
   3806       if (block->IsLoopHeader()) {
   3807         kind_ = SUCCESSORS_OF_LOOP_HEADER;
   3808         loop_header_ = block;
   3809         InitializeSuccessors();
   3810         PostorderProcessor* result = Push(zone);
   3811         return result->SetupLoopMembers(zone, block, block->loop_information(),
   3812                                         loop_header);
   3813       } else {
   3814         DCHECK(block->IsFinished());
   3815         kind_ = SUCCESSORS;
   3816         loop_header_ = loop_header;
   3817         InitializeSuccessors();
   3818         return this;
   3819       }
   3820     }
   3821   }
   3822 
   3823   PostorderProcessor* SetupLoopMembers(Zone* zone,
   3824                                        HBasicBlock* block,
   3825                                        HLoopInformation* loop,
   3826                                        HBasicBlock* loop_header) {
   3827     kind_ = LOOP_MEMBERS;
   3828     block_ = block;
   3829     loop_ = loop;
   3830     loop_header_ = loop_header;
   3831     InitializeLoopMembers();
   3832     return this;
   3833   }
   3834 
   3835   PostorderProcessor* SetupSuccessorsOfLoopMember(
   3836       HBasicBlock* block,
   3837       HLoopInformation* loop,
   3838       HBasicBlock* loop_header) {
   3839     kind_ = SUCCESSORS_OF_LOOP_MEMBER;
   3840     block_ = block;
   3841     loop_ = loop;
   3842     loop_header_ = loop_header;
   3843     InitializeSuccessors();
   3844     return this;
   3845   }
   3846 
   3847   // This method "allocates" a new stack frame.
   3848   PostorderProcessor* Push(Zone* zone) {
   3849     if (child_ == NULL) {
   3850       child_ = new(zone) PostorderProcessor(this);
   3851     }
   3852     return child_;
   3853   }
   3854 
   3855   void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
   3856     DCHECK(block_->end()->FirstSuccessor() == NULL ||
   3857            order->Contains(block_->end()->FirstSuccessor()) ||
   3858            block_->end()->FirstSuccessor()->IsLoopHeader());
   3859     DCHECK(block_->end()->SecondSuccessor() == NULL ||
   3860            order->Contains(block_->end()->SecondSuccessor()) ||
   3861            block_->end()->SecondSuccessor()->IsLoopHeader());
   3862     order->Add(block_, zone);
   3863   }
   3864 
   3865   // This method is the basic building block for walking up the stack.
   3866   PostorderProcessor* Pop(Zone* zone,
   3867                           ZoneList<HBasicBlock*>* order) {
   3868     switch (kind_) {
   3869       case SUCCESSORS:
   3870       case SUCCESSORS_OF_LOOP_HEADER:
   3871         ClosePostorder(order, zone);
   3872         return father_;
   3873       case LOOP_MEMBERS:
   3874         return father_;
   3875       case SUCCESSORS_OF_LOOP_MEMBER:
   3876         if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
   3877           // In this case we need to perform a LOOP_MEMBERS cycle so we
   3878           // initialize it and return this instead of father.
   3879           return SetupLoopMembers(zone, block(),
   3880                                   block()->loop_information(), loop_header_);
   3881         } else {
   3882           return father_;
   3883         }
   3884       case NONE:
   3885         return father_;
   3886     }
   3887     UNREACHABLE();
   3888     return NULL;
   3889   }
   3890 
   3891   // Walks up the stack.
   3892   PostorderProcessor* Backtrack(Zone* zone,
   3893                                 ZoneList<HBasicBlock*>* order) {
   3894     PostorderProcessor* parent = Pop(zone, order);
   3895     while (parent != NULL) {
   3896       PostorderProcessor* next =
   3897           parent->PerformNonBacktrackingStep(zone, order);
   3898       if (next != NULL) {
   3899         return next;
   3900       } else {
   3901         parent = parent->Pop(zone, order);
   3902       }
   3903     }
   3904     return NULL;
   3905   }
   3906 
   3907   PostorderProcessor* PerformNonBacktrackingStep(
   3908       Zone* zone,
   3909       ZoneList<HBasicBlock*>* order) {
   3910     HBasicBlock* next_block;
   3911     switch (kind_) {
   3912       case SUCCESSORS:
   3913         next_block = AdvanceSuccessors();
   3914         if (next_block != NULL) {
   3915           PostorderProcessor* result = Push(zone);
   3916           return result->SetupSuccessors(zone, next_block, loop_header_);
   3917         }
   3918         break;
   3919       case SUCCESSORS_OF_LOOP_HEADER:
   3920         next_block = AdvanceSuccessors();
   3921         if (next_block != NULL) {
   3922           PostorderProcessor* result = Push(zone);
   3923           return result->SetupSuccessors(zone, next_block, block());
   3924         }
   3925         break;
   3926       case LOOP_MEMBERS:
   3927         next_block = AdvanceLoopMembers();
   3928         if (next_block != NULL) {
   3929           PostorderProcessor* result = Push(zone);
   3930           return result->SetupSuccessorsOfLoopMember(next_block,
   3931                                                      loop_, loop_header_);
   3932         }
   3933         break;
   3934       case SUCCESSORS_OF_LOOP_MEMBER:
   3935         next_block = AdvanceSuccessors();
   3936         if (next_block != NULL) {
   3937           PostorderProcessor* result = Push(zone);
   3938           return result->SetupSuccessors(zone, next_block, loop_header_);
   3939         }
   3940         break;
   3941       case NONE:
   3942         return NULL;
   3943     }
   3944     return NULL;
   3945   }
   3946 
   3947   // The following two methods implement a "foreach b in successors" cycle.
   3948   void InitializeSuccessors() {
   3949     loop_index = 0;
   3950     loop_length = 0;
   3951     successor_iterator = HSuccessorIterator(block_->end());
   3952   }
   3953 
   3954   HBasicBlock* AdvanceSuccessors() {
   3955     if (!successor_iterator.Done()) {
   3956       HBasicBlock* result = successor_iterator.Current();
   3957       successor_iterator.Advance();
   3958       return result;
   3959     }
   3960     return NULL;
   3961   }
   3962 
   3963   // The following two methods implement a "foreach b in loop members" cycle.
   3964   void InitializeLoopMembers() {
   3965     loop_index = 0;
   3966     loop_length = loop_->blocks()->length();
   3967   }
   3968 
   3969   HBasicBlock* AdvanceLoopMembers() {
   3970     if (loop_index < loop_length) {
   3971       HBasicBlock* result = loop_->blocks()->at(loop_index);
   3972       loop_index++;
   3973       return result;
   3974     } else {
   3975       return NULL;
   3976     }
   3977   }
   3978 
   3979   LoopKind kind_;
   3980   PostorderProcessor* father_;
   3981   PostorderProcessor* child_;
   3982   HLoopInformation* loop_;
   3983   HBasicBlock* block_;
   3984   HBasicBlock* loop_header_;
   3985   int loop_index;
   3986   int loop_length;
   3987   HSuccessorIterator successor_iterator;
   3988 };
   3989 
   3990 
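         // Orders blocks_ into reverse postorder: the graph is walked postorder
         // using an explicit, zone-allocated stack of PostorderProcessor frames
         // (instead of native recursion), and the resulting list is then reversed
         // in place while block IDs are reassigned to match the new positions.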
   3991 void HGraph::OrderBlocks() {
   3992   CompilationPhase phase("H_Block ordering", info());
   3993 
   3994 #ifdef DEBUG
   3995   // Initially the blocks must not be ordered.
   3996   for (int i = 0; i < blocks_.length(); ++i) {
   3997     DCHECK(!blocks_[i]->IsOrdered());
   3998   }
   3999 #endif
   4000 
   4001   PostorderProcessor* postorder =
   4002       PostorderProcessor::CreateEntryProcessor(zone(), blocks_[0]);
   4003   blocks_.Rewind(0);
   4004   while (postorder) {
   4005     postorder = postorder->PerformStep(zone(), &blocks_);
   4006   }
   4007 
   4008 #ifdef DEBUG
   4009   // Now all blocks must be marked as ordered.
   4010   for (int i = 0; i < blocks_.length(); ++i) {
   4011     DCHECK(blocks_[i]->IsOrdered());
   4012   }
   4013 #endif
   4014 
   4015   // Reverse block list and assign block IDs.
   4016   for (int i = 0, j = blocks_.length(); --j >= i; ++i) {
   4017     HBasicBlock* bi = blocks_[i];
   4018     HBasicBlock* bj = blocks_[j];
   4019     bi->set_block_id(j);
   4020     bj->set_block_id(i);
   4021     blocks_[i] = bj;
   4022     blocks_[j] = bi;
   4023   }
   4024 }
   4025 
   4026 
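         // Computes the dominator of each block by folding every predecessor into
         // the block's dominator via AssignCommonDominator, visiting blocks in the
         // order established by OrderBlocks(). Loop headers only consider their
         // first predecessor, since all other predecessors are back edges.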
   4027 void HGraph::AssignDominators() {
   4028   HPhase phase("H_Assign dominators", this);
   4029   for (int i = 0; i < blocks_.length(); ++i) {
   4030     HBasicBlock* block = blocks_[i];
   4031     if (block->IsLoopHeader()) {
   4032       // Only the first predecessor of a loop header is from outside the loop.
   4033       // All others are back edges, and thus cannot dominate the loop header.
   4034       block->AssignCommonDominator(block->predecessors()->first());
   4035       block->AssignLoopSuccessorDominators();
   4036     } else {
   4037       for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
   4038         blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
   4039       }
   4040     }
   4041   }
   4042 }
   4043 
   4044 
   4045 bool HGraph::CheckArgumentsPhiUses() {
   4046   int block_count = blocks_.length();
   4047   for (int i = 0; i < block_count; ++i) {
   4048     for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
   4049       HPhi* phi = blocks_[i]->phis()->at(j);
   4050       // We don't support phi uses of arguments for now.
   4051       if (phi->CheckFlag(HValue::kIsArguments)) return false;
   4052     }
   4053   }
   4054   return true;
   4055 }
   4056 
   4057 
   4058 bool HGraph::CheckConstPhiUses() {
   4059   int block_count = blocks_.length();
   4060   for (int i = 0; i < block_count; ++i) {
   4061     for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
   4062       HPhi* phi = blocks_[i]->phis()->at(j);
   4063       // Check for the hole value (from an uninitialized const).
   4064       for (int k = 0; k < phi->OperandCount(); k++) {
   4065         if (phi->OperandAt(k) == GetConstantHole()) return false;
   4066       }
   4067     }
   4068   }
   4069   return true;
   4070 }
   4071 
   4072 
   4073 void HGraph::CollectPhis() {
   4074   int block_count = blocks_.length();
   4075   phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
   4076   for (int i = 0; i < block_count; ++i) {
   4077     for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
   4078       HPhi* phi = blocks_[i]->phis()->at(j);
   4079       phi_list_->Add(phi, zone());
   4080     }
   4081   }
   4082 }
   4083 
   4084 
   4085 // Implementation of utility class to encapsulate the translation state for
   4086 // a (possibly inlined) function.
   4087 FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
   4088                              CompilationInfo* info, InliningKind inlining_kind,
   4089                              int inlining_id)
   4090     : owner_(owner),
   4091       compilation_info_(info),
   4092       call_context_(NULL),
   4093       inlining_kind_(inlining_kind),
   4094       function_return_(NULL),
   4095       test_context_(NULL),
   4096       entry_(NULL),
   4097       arguments_object_(NULL),
   4098       arguments_elements_(NULL),
   4099       inlining_id_(inlining_id),
   4100       outer_source_position_(SourcePosition::Unknown()),
   4101       outer_(owner->function_state()) {
   4102   if (outer_ != NULL) {
   4103     // State for an inline function.
   4104     if (owner->ast_context()->IsTest()) {
   4105       HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
   4106       HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
   4107       if_true->MarkAsInlineReturnTarget(owner->current_block());
   4108       if_false->MarkAsInlineReturnTarget(owner->current_block());
   4109       TestContext* outer_test_context = TestContext::cast(owner->ast_context());
   4110       Expression* cond = outer_test_context->condition();
    4111       // The AstContext constructor pushes the new context onto the context
    4112       // stack; this heap-allocated instance is why AstContext can't be BASE_EMBEDDED.
   4113       test_context_ = new TestContext(owner, cond, if_true, if_false);
   4114     } else {
   4115       function_return_ = owner->graph()->CreateBasicBlock();
   4116       function_return()->MarkAsInlineReturnTarget(owner->current_block());
   4117     }
   4118     // Set this after possibly allocating a new TestContext above.
   4119     call_context_ = owner->ast_context();
   4120   }
   4121 
   4122   // Push on the state stack.
   4123   owner->set_function_state(this);
   4124 
   4125   if (compilation_info_->is_tracking_positions()) {
   4126     outer_source_position_ = owner->source_position();
   4127     owner->EnterInlinedSource(
   4128       info->shared_info()->start_position(),
   4129       inlining_id);
   4130     owner->SetSourcePosition(info->shared_info()->start_position());
   4131   }
   4132 }
   4133 
   4134 
   4135 FunctionState::~FunctionState() {
   4136   delete test_context_;
   4137   owner_->set_function_state(outer_);
   4138 
   4139   if (compilation_info_->is_tracking_positions()) {
   4140     owner_->set_source_position(outer_source_position_);
   4141     owner_->EnterInlinedSource(
   4142       outer_->compilation_info()->shared_info()->start_position(),
   4143       outer_->inlining_id());
   4144   }
   4145 }
   4146 
   4147 
   4148 // Implementation of utility classes to represent an expression's context in
   4149 // the AST.
   4150 AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
   4151     : owner_(owner),
   4152       kind_(kind),
   4153       outer_(owner->ast_context()),
   4154       typeof_mode_(NOT_INSIDE_TYPEOF) {
   4155   owner->set_ast_context(this);  // Push.
   4156 #ifdef DEBUG
   4157   DCHECK(owner->environment()->frame_type() == JS_FUNCTION);
   4158   original_length_ = owner->environment()->length();
   4159 #endif
   4160 }
   4161 
   4162 
   4163 AstContext::~AstContext() {
   4164   owner_->set_ast_context(outer_);  // Pop.
   4165 }
   4166 
   4167 
   4168 EffectContext::~EffectContext() {
   4169   DCHECK(owner()->HasStackOverflow() ||
   4170          owner()->current_block() == NULL ||
   4171          (owner()->environment()->length() == original_length_ &&
   4172           owner()->environment()->frame_type() == JS_FUNCTION));
   4173 }
   4174 
   4175 
   4176 ValueContext::~ValueContext() {
   4177   DCHECK(owner()->HasStackOverflow() ||
   4178          owner()->current_block() == NULL ||
   4179          (owner()->environment()->length() == original_length_ + 1 &&
   4180           owner()->environment()->frame_type() == JS_FUNCTION));
   4181 }
   4182 
   4183 
   4184 void EffectContext::ReturnValue(HValue* value) {
   4185   // The value is simply ignored.
   4186 }
   4187 
   4188 
   4189 void ValueContext::ReturnValue(HValue* value) {
   4190   // The value is tracked in the bailout environment, and communicated
   4191   // through the environment as the result of the expression.
   4192   if (value->CheckFlag(HValue::kIsArguments)) {
   4193     if (flag_ == ARGUMENTS_FAKED) {
   4194       value = owner()->graph()->GetConstantUndefined();
   4195     } else if (!arguments_allowed()) {
   4196       owner()->Bailout(kBadValueContextForArgumentsValue);
   4197     }
   4198   }
   4199   owner()->Push(value);
   4200 }
   4201 
   4202 
   4203 void TestContext::ReturnValue(HValue* value) {
   4204   BuildBranch(value);
   4205 }
   4206 
   4207 
   4208 void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
   4209   DCHECK(!instr->IsControlInstruction());
   4210   owner()->AddInstruction(instr);
   4211   if (instr->HasObservableSideEffects()) {
   4212     owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
   4213   }
   4214 }
   4215 
   4216 
   4217 void EffectContext::ReturnControl(HControlInstruction* instr,
   4218                                   BailoutId ast_id) {
   4219   DCHECK(!instr->HasObservableSideEffects());
   4220   HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
   4221   HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
   4222   instr->SetSuccessorAt(0, empty_true);
   4223   instr->SetSuccessorAt(1, empty_false);
   4224   owner()->FinishCurrentBlock(instr);
   4225   HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
   4226   owner()->set_current_block(join);
   4227 }
   4228 
   4229 
   4230 void EffectContext::ReturnContinuation(HIfContinuation* continuation,
   4231                                        BailoutId ast_id) {
   4232   HBasicBlock* true_branch = NULL;
   4233   HBasicBlock* false_branch = NULL;
   4234   continuation->Continue(&true_branch, &false_branch);
   4235   if (!continuation->IsTrueReachable()) {
   4236     owner()->set_current_block(false_branch);
   4237   } else if (!continuation->IsFalseReachable()) {
   4238     owner()->set_current_block(true_branch);
   4239   } else {
   4240     HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
   4241     owner()->set_current_block(join);
   4242   }
   4243 }
   4244 
   4245 
   4246 void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
   4247   DCHECK(!instr->IsControlInstruction());
   4248   if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
   4249     return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
   4250   }
   4251   owner()->AddInstruction(instr);
   4252   owner()->Push(instr);
   4253   if (instr->HasObservableSideEffects()) {
   4254     owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
   4255   }
   4256 }
   4257 
   4258 
   4259 void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
   4260   DCHECK(!instr->HasObservableSideEffects());
   4261   if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
   4262     return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
   4263   }
   4264   HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
   4265   HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
   4266   instr->SetSuccessorAt(0, materialize_true);
   4267   instr->SetSuccessorAt(1, materialize_false);
   4268   owner()->FinishCurrentBlock(instr);
   4269   owner()->set_current_block(materialize_true);
   4270   owner()->Push(owner()->graph()->GetConstantTrue());
   4271   owner()->set_current_block(materialize_false);
   4272   owner()->Push(owner()->graph()->GetConstantFalse());
   4273   HBasicBlock* join =
   4274     owner()->CreateJoin(materialize_true, materialize_false, ast_id);
   4275   owner()->set_current_block(join);
   4276 }
   4277 
   4278 
   4279 void ValueContext::ReturnContinuation(HIfContinuation* continuation,
   4280                                       BailoutId ast_id) {
   4281   HBasicBlock* materialize_true = NULL;
   4282   HBasicBlock* materialize_false = NULL;
   4283   continuation->Continue(&materialize_true, &materialize_false);
   4284   if (continuation->IsTrueReachable()) {
   4285     owner()->set_current_block(materialize_true);
   4286     owner()->Push(owner()->graph()->GetConstantTrue());
   4287     owner()->set_current_block(materialize_true);
   4288   }
   4289   if (continuation->IsFalseReachable()) {
   4290     owner()->set_current_block(materialize_false);
   4291     owner()->Push(owner()->graph()->GetConstantFalse());
   4292     owner()->set_current_block(materialize_false);
   4293   }
   4294   if (continuation->TrueAndFalseReachable()) {
   4295     HBasicBlock* join =
   4296         owner()->CreateJoin(materialize_true, materialize_false, ast_id);
   4297     owner()->set_current_block(join);
   4298   }
   4299 }
   4300 
   4301 
   4302 void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
   4303   DCHECK(!instr->IsControlInstruction());
   4304   HOptimizedGraphBuilder* builder = owner();
   4305   builder->AddInstruction(instr);
   4306   // We expect a simulate after every expression with side effects, though
   4307   // this one isn't actually needed (and wouldn't work if it were targeted).
   4308   if (instr->HasObservableSideEffects()) {
   4309     builder->Push(instr);
   4310     builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
   4311     builder->Pop();
   4312   }
   4313   BuildBranch(instr);
   4314 }
   4315 
   4316 
   4317 void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
   4318   DCHECK(!instr->HasObservableSideEffects());
   4319   HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
   4320   HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
   4321   instr->SetSuccessorAt(0, empty_true);
   4322   instr->SetSuccessorAt(1, empty_false);
   4323   owner()->FinishCurrentBlock(instr);
   4324   owner()->Goto(empty_true, if_true(), owner()->function_state());
   4325   owner()->Goto(empty_false, if_false(), owner()->function_state());
   4326   owner()->set_current_block(NULL);
   4327 }
   4328 
   4329 
   4330 void TestContext::ReturnContinuation(HIfContinuation* continuation,
   4331                                      BailoutId ast_id) {
   4332   HBasicBlock* true_branch = NULL;
   4333   HBasicBlock* false_branch = NULL;
   4334   continuation->Continue(&true_branch, &false_branch);
   4335   if (continuation->IsTrueReachable()) {
   4336     owner()->Goto(true_branch, if_true(), owner()->function_state());
   4337   }
   4338   if (continuation->IsFalseReachable()) {
   4339     owner()->Goto(false_branch, if_false(), owner()->function_state());
   4340   }
   4341   owner()->set_current_block(NULL);
   4342 }
   4343 
   4344 
   4345 void TestContext::BuildBranch(HValue* value) {
   4346   // We expect the graph to be in edge-split form: there is no edge that
   4347   // connects a branch node to a join node.  We conservatively ensure that
   4348   // property by always adding an empty block on the outgoing edges of this
   4349   // branch.
   4350   HOptimizedGraphBuilder* builder = owner();
   4351   if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
   4352     builder->Bailout(kArgumentsObjectValueInATestContext);
   4353   }
   4354   ToBooleanStub::Types expected(condition()->to_boolean_types());
   4355   ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
   4356 }
   4357 
   4358 
   4359 // HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.
   4360 #define CHECK_BAILOUT(call)                     \
   4361   do {                                          \
   4362     call;                                       \
   4363     if (HasStackOverflow()) return;             \
   4364   } while (false)
   4365 
   4366 
   4367 #define CHECK_ALIVE(call)                                       \
   4368   do {                                                          \
   4369     call;                                                       \
   4370     if (HasStackOverflow() || current_block() == NULL) return;  \
   4371   } while (false)
   4372 
   4373 
   4374 #define CHECK_ALIVE_OR_RETURN(call, value)                            \
   4375   do {                                                                \
   4376     call;                                                             \
   4377     if (HasStackOverflow() || current_block() == NULL) return value;  \
   4378   } while (false)
   4379 
   4380 
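         // Illustrative usage of the macros above (a sketch based on the patterns in
         // this file, not additional API): a visitor aborts optimization by calling
         // Bailout() with a BailoutReason, which sets the stack-overflow flag, and
         // enclosing visitors then unwind early, e.g.
         //   CHECK_ALIVE(VisitForValue(expr));   // returns if a bailout occurred or
         //                                       // the current block was closed.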
   4381 void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
   4382   current_info()->AbortOptimization(reason);
   4383   SetStackOverflow();
   4384 }
   4385 
   4386 
   4387 void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
   4388   EffectContext for_effect(this);
   4389   Visit(expr);
   4390 }
   4391 
   4392 
   4393 void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
   4394                                            ArgumentsAllowedFlag flag) {
   4395   ValueContext for_value(this, flag);
   4396   Visit(expr);
   4397 }
   4398 
   4399 
   4400 void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
   4401   ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
   4402   for_value.set_typeof_mode(INSIDE_TYPEOF);
   4403   Visit(expr);
   4404 }
   4405 
   4406 
   4407 void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
   4408                                              HBasicBlock* true_block,
   4409                                              HBasicBlock* false_block) {
   4410   TestContext for_control(this, expr, true_block, false_block);
   4411   Visit(expr);
   4412 }
   4413 
   4414 
   4415 void HOptimizedGraphBuilder::VisitExpressions(
   4416     ZoneList<Expression*>* exprs) {
   4417   for (int i = 0; i < exprs->length(); ++i) {
   4418     CHECK_ALIVE(VisitForValue(exprs->at(i)));
   4419   }
   4420 }
   4421 
   4422 
   4423 void HOptimizedGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs,
   4424                                               ArgumentsAllowedFlag flag) {
   4425   for (int i = 0; i < exprs->length(); ++i) {
   4426     CHECK_ALIVE(VisitForValue(exprs->at(i), flag));
   4427   }
   4428 }
   4429 
   4430 
   4431 bool HOptimizedGraphBuilder::BuildGraph() {
   4432   if (IsSubclassConstructor(current_info()->literal()->kind())) {
   4433     Bailout(kSuperReference);
   4434     return false;
   4435   }
   4436 
   4437   Scope* scope = current_info()->scope();
   4438   SetUpScope(scope);
   4439 
   4440   // Add an edge to the body entry.  This is warty: the graph's start
   4441   // environment will be used by the Lithium translation as the initial
   4442   // environment on graph entry, but it has now been mutated by the
   4443   // Hydrogen translation of the instructions in the start block.  This
   4444   // environment uses values which have not been defined yet.  These
   4445   // Hydrogen instructions will then be replayed by the Lithium
   4446   // translation, so they cannot have an environment effect.  The edge to
   4447   // the body's entry block (along with some special logic for the start
   4448   // block in HInstruction::InsertAfter) seals the start block from
   4449   // getting unwanted instructions inserted.
   4450   //
   4451   // TODO(kmillikin): Fix this.  Stop mutating the initial environment.
   4452   // Make the Hydrogen instructions in the initial block into Hydrogen
   4453   // values (but not instructions), present in the initial environment and
   4454   // not replayed by the Lithium translation.
   4455   HEnvironment* initial_env = environment()->CopyWithoutHistory();
   4456   HBasicBlock* body_entry = CreateBasicBlock(initial_env);
   4457   Goto(body_entry);
   4458   body_entry->SetJoinId(BailoutId::FunctionEntry());
   4459   set_current_block(body_entry);
   4460 
   4461   VisitDeclarations(scope->declarations());
   4462   Add<HSimulate>(BailoutId::Declarations());
   4463 
   4464   Add<HStackCheck>(HStackCheck::kFunctionEntry);
   4465 
   4466   VisitStatements(current_info()->literal()->body());
   4467   if (HasStackOverflow()) return false;
   4468 
   4469   if (current_block() != NULL) {
   4470     Add<HReturn>(graph()->GetConstantUndefined());
   4471     set_current_block(NULL);
   4472   }
   4473 
   4474   // If the checksum of the number of type info changes is the same as the
   4475   // last time this function was compiled, then this recompile is likely not
   4476   // due to missing/inadequate type feedback, but rather too aggressive
   4477   // optimization. Disable optimistic LICM in that case.
   4478   Handle<Code> unoptimized_code(current_info()->shared_info()->code());
   4479   DCHECK(unoptimized_code->kind() == Code::FUNCTION);
   4480   Handle<TypeFeedbackInfo> type_info(
   4481       TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
   4482   int checksum = type_info->own_type_change_checksum();
   4483   int composite_checksum = graph()->update_type_change_checksum(checksum);
   4484   graph()->set_use_optimistic_licm(
   4485       !type_info->matches_inlined_type_change_checksum(composite_checksum));
   4486   type_info->set_inlined_type_change_checksum(composite_checksum);
   4487 
   4488   // Perform any necessary OSR-specific cleanups or changes to the graph.
   4489   osr()->FinishGraph();
   4490 
   4491   return true;
   4492 }
   4493 
   4494 
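         // Runs the Hydrogen optimization pipeline over the graph. The phase order
         // below is significant; see the individual comments (e.g. uint32 analysis
         // must run before canonicalization, and removable-simulate merging must
         // follow representation inference).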
   4495 bool HGraph::Optimize(BailoutReason* bailout_reason) {
   4496   OrderBlocks();
   4497   AssignDominators();
   4498 
   4499   // We need to create a HConstant "zero" now so that GVN will fold every
   4500   // zero-valued constant in the graph together.
    4501   // The constant is needed to make idef-based bounds checks work: the pass
   4502   // evaluates relations with "zero" and that zero cannot be created after GVN.
   4503   GetConstant0();
   4504 
   4505 #ifdef DEBUG
   4506   // Do a full verify after building the graph and computing dominators.
   4507   Verify(true);
   4508 #endif
   4509 
   4510   if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
   4511     Run<HEnvironmentLivenessAnalysisPhase>();
   4512   }
   4513 
   4514   if (!CheckConstPhiUses()) {
   4515     *bailout_reason = kUnsupportedPhiUseOfConstVariable;
   4516     return false;
   4517   }
   4518   Run<HRedundantPhiEliminationPhase>();
   4519   if (!CheckArgumentsPhiUses()) {
   4520     *bailout_reason = kUnsupportedPhiUseOfArguments;
   4521     return false;
   4522   }
   4523 
   4524   // Find and mark unreachable code to simplify optimizations, especially gvn,
   4525   // where unreachable code could unnecessarily defeat LICM.
   4526   Run<HMarkUnreachableBlocksPhase>();
   4527 
   4528   if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
   4529   if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();
   4530 
   4531   if (FLAG_load_elimination) Run<HLoadEliminationPhase>();
   4532 
   4533   CollectPhis();
   4534 
   4535   if (has_osr()) osr()->FinishOsrValues();
   4536 
   4537   Run<HInferRepresentationPhase>();
   4538 
   4539   // Remove HSimulate instructions that have turned out not to be needed
   4540   // after all by folding them into the following HSimulate.
   4541   // This must happen after inferring representations.
   4542   Run<HMergeRemovableSimulatesPhase>();
   4543 
   4544   Run<HMarkDeoptimizeOnUndefinedPhase>();
   4545   Run<HRepresentationChangesPhase>();
   4546 
   4547   Run<HInferTypesPhase>();
   4548 
   4549   // Must be performed before canonicalization to ensure that Canonicalize
   4550   // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
   4551   // zero.
   4552   Run<HUint32AnalysisPhase>();
   4553 
   4554   if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();
   4555 
   4556   if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();
   4557 
   4558   if (FLAG_check_elimination) Run<HCheckEliminationPhase>();
   4559 
   4560   if (FLAG_store_elimination) Run<HStoreEliminationPhase>();
   4561 
   4562   Run<HRangeAnalysisPhase>();
   4563 
   4564   Run<HComputeChangeUndefinedToNaN>();
   4565 
   4566   // Eliminate redundant stack checks on backwards branches.
   4567   Run<HStackCheckEliminationPhase>();
   4568 
   4569   if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
   4570   if (FLAG_array_bounds_checks_hoisting) Run<HBoundsCheckHoistingPhase>();
   4571   if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
   4572   if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
   4573 
   4574   RestoreActualValues();
   4575 
    4576   // Find unreachable code a second time; GVN and other optimizations may have
    4577   // made previously reachable blocks unreachable.
   4578   Run<HMarkUnreachableBlocksPhase>();
   4579 
   4580   return true;
   4581 }
   4582 
   4583 
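         // Removes the informative redefinitions left behind by earlier phases: any
         // instruction whose ActualValue() differs from itself is either deleted and
         // replaced outright or has its uses redirected to the actual value.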
   4584 void HGraph::RestoreActualValues() {
   4585   HPhase phase("H_Restore actual values", this);
   4586 
   4587   for (int block_index = 0; block_index < blocks()->length(); block_index++) {
   4588     HBasicBlock* block = blocks()->at(block_index);
   4589 
   4590 #ifdef DEBUG
   4591     for (int i = 0; i < block->phis()->length(); i++) {
   4592       HPhi* phi = block->phis()->at(i);
   4593       DCHECK(phi->ActualValue() == phi);
   4594     }
   4595 #endif
   4596 
   4597     for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
   4598       HInstruction* instruction = it.Current();
   4599       if (instruction->ActualValue() == instruction) continue;
   4600       if (instruction->CheckFlag(HValue::kIsDead)) {
   4601         // The instruction was marked as deleted but left in the graph
   4602         // as a control flow dependency point for subsequent
   4603         // instructions.
   4604         instruction->DeleteAndReplaceWith(instruction->ActualValue());
   4605       } else {
   4606         DCHECK(instruction->IsInformativeDefinition());
   4607         if (instruction->IsPurelyInformativeDefinition()) {
   4608           instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
   4609         } else {
   4610           instruction->ReplaceAllUsesWith(instruction->ActualValue());
   4611         }
   4612       }
   4613     }
   4614   }
   4615 }
   4616 
   4617 
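         // Pops |count| values off the environment (they come off in reverse order)
         // and re-emits them, restored to their original order, as a single
         // HPushArguments instruction.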
   4618 void HOptimizedGraphBuilder::PushArgumentsFromEnvironment(int count) {
   4619   ZoneList<HValue*> arguments(count, zone());
   4620   for (int i = 0; i < count; ++i) {
   4621     arguments.Add(Pop(), zone());
   4622   }
   4623 
   4624   HPushArguments* push_args = New<HPushArguments>();
   4625   while (!arguments.is_empty()) {
   4626     push_args->AddInput(arguments.RemoveLast());
   4627   }
   4628   AddInstruction(push_args);
   4629 }
   4630 
   4631 
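         // Pushes the call's arguments from the environment before the call
         // instruction itself is returned to the caller for emission.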
   4632 template <class Instruction>
   4633 HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
   4634   PushArgumentsFromEnvironment(call->argument_count());
   4635   return call;
   4636 }
   4637 
   4638 
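         // Sets up the initial environment for the function: binds the incoming
         // parameters, initializes the remaining specials and locals to undefined,
         // emits the prologue, and materializes the arguments object. Bails out on
         // constructs not handled here (rest parameters, new.target, the
         // this-function variable).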
   4639 void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
   4640   HEnvironment* prolog_env = environment();
   4641   int parameter_count = environment()->parameter_count();
   4642   ZoneList<HValue*> parameters(parameter_count, zone());
   4643   for (int i = 0; i < parameter_count; ++i) {
   4644     HInstruction* parameter = Add<HParameter>(static_cast<unsigned>(i));
   4645     parameters.Add(parameter, zone());
   4646     environment()->Bind(i, parameter);
   4647   }
   4648 
   4649   HConstant* undefined_constant = graph()->GetConstantUndefined();
   4650   // Initialize specials and locals to undefined.
   4651   for (int i = parameter_count + 1; i < environment()->length(); ++i) {
   4652     environment()->Bind(i, undefined_constant);
   4653   }
   4654   Add<HPrologue>();
   4655 
   4656   HEnvironment* initial_env = environment()->CopyWithoutHistory();
   4657   HBasicBlock* body_entry = CreateBasicBlock(initial_env);
   4658   GotoNoSimulate(body_entry);
   4659   set_current_block(body_entry);
   4660 
   4661   // Initialize context of prolog environment to undefined.
   4662   prolog_env->BindContext(undefined_constant);
   4663 
   4664   // First special is HContext.
   4665   HInstruction* context = Add<HContext>();
   4666   environment()->BindContext(context);
   4667 
    4668   // Create an arguments object containing the initial parameters.  Set the
    4669   // initial values of the parameters; "this" has parameter index 0.
   4670   DCHECK_EQ(scope->num_parameters() + 1, parameter_count);
   4671   HArgumentsObject* arguments_object = New<HArgumentsObject>(parameter_count);
   4672   for (int i = 0; i < parameter_count; ++i) {
   4673     HValue* parameter = parameters.at(i);
   4674     arguments_object->AddArgument(parameter, zone());
   4675   }
   4676 
   4677   AddInstruction(arguments_object);
   4678   graph()->SetArgumentsObject(arguments_object);
   4679 
   4680   // Handle the arguments and arguments shadow variables specially (they do
   4681   // not have declarations).
   4682   if (scope->arguments() != NULL) {
   4683     environment()->Bind(scope->arguments(), graph()->GetArgumentsObject());
   4684   }
   4685 
   4686   int rest_index;
   4687   Variable* rest = scope->rest_parameter(&rest_index);
   4688   if (rest) {
   4689     return Bailout(kRestParameter);
   4690   }
   4691 
   4692   if (scope->this_function_var() != nullptr ||
   4693       scope->new_target_var() != nullptr) {
   4694     return Bailout(kSuperReference);
   4695   }
   4696 
   4697   // Trace the call.
   4698   if (FLAG_trace && top_info()->IsOptimizing()) {
   4699     Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kTraceEnter), 0);
   4700   }
   4701 }
   4702 
   4703 
   4704 void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
   4705   for (int i = 0; i < statements->length(); i++) {
   4706     Statement* stmt = statements->at(i);
   4707     CHECK_ALIVE(Visit(stmt));
   4708     if (stmt->IsJump()) break;
   4709   }
   4710 }
   4711 
   4712 
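         // Translates a block statement. If the block's scope needs a context, a
         // block context is allocated and installed on entry and the previous
         // context is restored when control leaves the block.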
   4713 void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
   4714   DCHECK(!HasStackOverflow());
   4715   DCHECK(current_block() != NULL);
   4716   DCHECK(current_block()->HasPredecessor());
   4717 
   4718   Scope* outer_scope = scope();
   4719   Scope* scope = stmt->scope();
   4720   BreakAndContinueInfo break_info(stmt, outer_scope);
   4721 
   4722   { BreakAndContinueScope push(&break_info, this);
   4723     if (scope != NULL) {
   4724       if (scope->NeedsContext()) {
   4725         // Load the function object.
   4726         Scope* declaration_scope = scope->DeclarationScope();
   4727         HInstruction* function;
   4728         HValue* outer_context = environment()->context();
   4729         if (declaration_scope->is_script_scope() ||
   4730             declaration_scope->is_eval_scope()) {
   4731           function = new (zone())
   4732               HLoadContextSlot(outer_context, Context::CLOSURE_INDEX,
   4733                                HLoadContextSlot::kNoCheck);
   4734         } else {
   4735           function = New<HThisFunction>();
   4736         }
   4737         AddInstruction(function);
   4738         // Allocate a block context and store it to the stack frame.
   4739         HInstruction* inner_context = Add<HAllocateBlockContext>(
   4740             outer_context, function, scope->GetScopeInfo(isolate()));
   4741         HInstruction* instr = Add<HStoreFrameContext>(inner_context);
   4742         set_scope(scope);
   4743         environment()->BindContext(inner_context);
   4744         if (instr->HasObservableSideEffects()) {
   4745           AddSimulate(stmt->EntryId(), REMOVABLE_SIMULATE);
   4746         }
   4747       }
   4748       VisitDeclarations(scope->declarations());
   4749       AddSimulate(stmt->DeclsId(), REMOVABLE_SIMULATE);
   4750     }
   4751     CHECK_BAILOUT(VisitStatements(stmt->statements()));
   4752   }
   4753   set_scope(outer_scope);
   4754   if (scope != NULL && current_block() != NULL &&
   4755       scope->ContextLocalCount() > 0) {
   4756     HValue* inner_context = environment()->context();
   4757     HValue* outer_context = Add<HLoadNamedField>(
   4758         inner_context, nullptr,
   4759         HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
   4760 
   4761     HInstruction* instr = Add<HStoreFrameContext>(outer_context);
   4762     environment()->BindContext(outer_context);
   4763     if (instr->HasObservableSideEffects()) {
   4764       AddSimulate(stmt->ExitId(), REMOVABLE_SIMULATE);
   4765     }
   4766   }
   4767   HBasicBlock* break_block = break_info.break_block();
   4768   if (break_block != NULL) {
   4769     if (current_block() != NULL) Goto(break_block);
   4770     break_block->SetJoinId(stmt->ExitId());
   4771     set_current_block(break_block);
   4772   }
   4773 }
   4774 
   4775 
   4776 void HOptimizedGraphBuilder::VisitExpressionStatement(
   4777     ExpressionStatement* stmt) {
   4778   DCHECK(!HasStackOverflow());
   4779   DCHECK(current_block() != NULL);
   4780   DCHECK(current_block()->HasPredecessor());
   4781   VisitForEffect(stmt->expression());
   4782 }
   4783 
   4784 
   4785 void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
   4786   DCHECK(!HasStackOverflow());
   4787   DCHECK(current_block() != NULL);
   4788   DCHECK(current_block()->HasPredecessor());
   4789 }
   4790 
   4791 
   4792 void HOptimizedGraphBuilder::VisitSloppyBlockFunctionStatement(
   4793     SloppyBlockFunctionStatement* stmt) {
   4794   Visit(stmt->statement());
   4795 }
   4796 
   4797 
   4798 void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
   4799   DCHECK(!HasStackOverflow());
   4800   DCHECK(current_block() != NULL);
   4801   DCHECK(current_block()->HasPredecessor());
   4802   if (stmt->condition()->ToBooleanIsTrue()) {
   4803     Add<HSimulate>(stmt->ThenId());
   4804     Visit(stmt->then_statement());
   4805   } else if (stmt->condition()->ToBooleanIsFalse()) {
   4806     Add<HSimulate>(stmt->ElseId());
   4807     Visit(stmt->else_statement());
   4808   } else {
   4809     HBasicBlock* cond_true = graph()->CreateBasicBlock();
   4810     HBasicBlock* cond_false = graph()->CreateBasicBlock();
   4811     CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));
   4812 
   4813     if (cond_true->HasPredecessor()) {
   4814       cond_true->SetJoinId(stmt->ThenId());
   4815       set_current_block(cond_true);
   4816       CHECK_BAILOUT(Visit(stmt->then_statement()));
   4817       cond_true = current_block();
   4818     } else {
   4819       cond_true = NULL;
   4820     }
   4821 
   4822     if (cond_false->HasPredecessor()) {
   4823       cond_false->SetJoinId(stmt->ElseId());
   4824       set_current_block(cond_false);
   4825       CHECK_BAILOUT(Visit(stmt->else_statement()));
   4826       cond_false = current_block();
   4827     } else {
   4828       cond_false = NULL;
   4829     }
   4830 
   4831     HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
   4832     set_current_block(join);
   4833   }
   4834 }
   4835 
   4836 
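         // Finds the innermost enclosing break/continue scope whose target is |stmt|
         // and returns the corresponding break or continue block, creating it lazily.
         // While walking outwards, *drop_extra accumulates the number of values that
         // must be dropped from the environment and *scope receives the scope to
         // restore.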
   4837 HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
   4838     BreakableStatement* stmt,
   4839     BreakType type,
   4840     Scope** scope,
   4841     int* drop_extra) {
   4842   *drop_extra = 0;
   4843   BreakAndContinueScope* current = this;
   4844   while (current != NULL && current->info()->target() != stmt) {
   4845     *drop_extra += current->info()->drop_extra();
   4846     current = current->next();
   4847   }
   4848   DCHECK(current != NULL);  // Always found (unless stack is malformed).
   4849   *scope = current->info()->scope();
   4850 
   4851   if (type == BREAK) {
   4852     *drop_extra += current->info()->drop_extra();
   4853   }
   4854 
   4855   HBasicBlock* block = NULL;
   4856   switch (type) {
   4857     case BREAK:
   4858       block = current->info()->break_block();
   4859       if (block == NULL) {
   4860         block = current->owner()->graph()->CreateBasicBlock();
   4861         current->info()->set_break_block(block);
   4862       }
   4863       break;
   4864 
   4865     case CONTINUE:
   4866       block = current->info()->continue_block();
   4867       if (block == NULL) {
   4868         block = current->owner()->graph()->CreateBasicBlock();
   4869         current->info()->set_continue_block(block);
   4870       }
   4871       break;
   4872   }
   4873 
   4874   return block;
   4875 }
   4876 
   4877 
   4878 void HOptimizedGraphBuilder::VisitContinueStatement(
   4879     ContinueStatement* stmt) {
   4880   DCHECK(!HasStackOverflow());
   4881   DCHECK(current_block() != NULL);
   4882   DCHECK(current_block()->HasPredecessor());
   4883   Scope* outer_scope = NULL;
   4884   Scope* inner_scope = scope();
   4885   int drop_extra = 0;
   4886   HBasicBlock* continue_block = break_scope()->Get(
   4887       stmt->target(), BreakAndContinueScope::CONTINUE,
   4888       &outer_scope, &drop_extra);
   4889   HValue* context = environment()->context();
   4890   Drop(drop_extra);
   4891   int context_pop_count = inner_scope->ContextChainLength(outer_scope);
   4892   if (context_pop_count > 0) {
   4893     while (context_pop_count-- > 0) {
   4894       HInstruction* context_instruction = Add<HLoadNamedField>(
   4895           context, nullptr,
   4896           HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
   4897       context = context_instruction;
   4898     }
   4899     HInstruction* instr = Add<HStoreFrameContext>(context);
   4900     if (instr->HasObservableSideEffects()) {
   4901       AddSimulate(stmt->target()->EntryId(), REMOVABLE_SIMULATE);
   4902     }
   4903     environment()->BindContext(context);
   4904   }
   4905 
   4906   Goto(continue_block);
   4907   set_current_block(NULL);
   4908 }
   4909 
   4910 
   4911 void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
   4912   DCHECK(!HasStackOverflow());
   4913   DCHECK(current_block() != NULL);
   4914   DCHECK(current_block()->HasPredecessor());
   4915   Scope* outer_scope = NULL;
   4916   Scope* inner_scope = scope();
   4917   int drop_extra = 0;
   4918   HBasicBlock* break_block = break_scope()->Get(
   4919       stmt->target(), BreakAndContinueScope::BREAK,
   4920       &outer_scope, &drop_extra);
   4921   HValue* context = environment()->context();
   4922   Drop(drop_extra);
   4923   int context_pop_count = inner_scope->ContextChainLength(outer_scope);
   4924   if (context_pop_count > 0) {
   4925     while (context_pop_count-- > 0) {
   4926       HInstruction* context_instruction = Add<HLoadNamedField>(
   4927           context, nullptr,
   4928           HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
   4929       context = context_instruction;
   4930     }
   4931     HInstruction* instr = Add<HStoreFrameContext>(context);
   4932     if (instr->HasObservableSideEffects()) {
   4933       AddSimulate(stmt->target()->ExitId(), REMOVABLE_SIMULATE);
   4934     }
   4935     environment()->BindContext(context);
   4936   }
   4937   Goto(break_block);
   4938   set_current_block(NULL);
   4939 }
   4940 
   4941 
   4942 void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
   4943   DCHECK(!HasStackOverflow());
   4944   DCHECK(current_block() != NULL);
   4945   DCHECK(current_block()->HasPredecessor());
   4946   FunctionState* state = function_state();
   4947   AstContext* context = call_context();
   4948   if (context == NULL) {
   4949     // Not an inlined return, so an actual one.
   4950     CHECK_ALIVE(VisitForValue(stmt->expression()));
   4951     HValue* result = environment()->Pop();
   4952     Add<HReturn>(result);
   4953   } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
   4954     // Return from an inlined construct call. In a test context the return value
    4955     // will always evaluate to true; in a value context the return value needs
   4956     // to be a JSObject.
   4957     if (context->IsTest()) {
   4958       TestContext* test = TestContext::cast(context);
   4959       CHECK_ALIVE(VisitForEffect(stmt->expression()));
   4960       Goto(test->if_true(), state);
   4961     } else if (context->IsEffect()) {
   4962       CHECK_ALIVE(VisitForEffect(stmt->expression()));
   4963       Goto(function_return(), state);
   4964     } else {
   4965       DCHECK(context->IsValue());
   4966       CHECK_ALIVE(VisitForValue(stmt->expression()));
   4967       HValue* return_value = Pop();
   4968       HValue* receiver = environment()->arguments_environment()->Lookup(0);
   4969       HHasInstanceTypeAndBranch* typecheck =
   4970           New<HHasInstanceTypeAndBranch>(return_value,
   4971                                          FIRST_JS_RECEIVER_TYPE,
   4972                                          LAST_JS_RECEIVER_TYPE);
   4973       HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
   4974       HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
   4975       typecheck->SetSuccessorAt(0, if_spec_object);
   4976       typecheck->SetSuccessorAt(1, not_spec_object);
   4977       FinishCurrentBlock(typecheck);
   4978       AddLeaveInlined(if_spec_object, return_value, state);
   4979       AddLeaveInlined(not_spec_object, receiver, state);
   4980     }
   4981   } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
    4982     // Return from an inlined setter call. The returned value is never used;
    4983     // the value of an assignment is always the value of the RHS of the assignment.
   4984     CHECK_ALIVE(VisitForEffect(stmt->expression()));
   4985     if (context->IsTest()) {
   4986       HValue* rhs = environment()->arguments_environment()->Lookup(1);
   4987       context->ReturnValue(rhs);
   4988     } else if (context->IsEffect()) {
   4989       Goto(function_return(), state);
   4990     } else {
   4991       DCHECK(context->IsValue());
   4992       HValue* rhs = environment()->arguments_environment()->Lookup(1);
   4993       AddLeaveInlined(rhs, state);
   4994     }
   4995   } else {
   4996     // Return from a normal inlined function. Visit the subexpression in the
   4997     // expression context of the call.
   4998     if (context->IsTest()) {
   4999       TestContext* test = TestContext::cast(context);
   5000       VisitForControl(stmt->expression(), test->if_true(), test->if_false());
   5001     } else if (context->IsEffect()) {
    5002       // Visit in value context and ignore the result. This is needed to keep the
    5003       // environment in sync with full-codegen, since some visitors (e.g.
   5004       // VisitCountOperation) use the operand stack differently depending on
   5005       // context.
   5006       CHECK_ALIVE(VisitForValue(stmt->expression()));
   5007       Pop();
   5008       Goto(function_return(), state);
   5009     } else {
   5010       DCHECK(context->IsValue());
   5011       CHECK_ALIVE(VisitForValue(stmt->expression()));
   5012       AddLeaveInlined(Pop(), state);
   5013     }
   5014   }
   5015   set_current_block(NULL);
   5016 }
   5017 
   5018 
   5019 void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
   5020   DCHECK(!HasStackOverflow());
   5021   DCHECK(current_block() != NULL);
   5022   DCHECK(current_block()->HasPredecessor());
   5023   return Bailout(kWithStatement);
   5024 }
   5025 
   5026 
   5027 void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
   5028   DCHECK(!HasStackOverflow());
   5029   DCHECK(current_block() != NULL);
   5030   DCHECK(current_block()->HasPredecessor());
   5031 
   5032   ZoneList<CaseClause*>* clauses = stmt->cases();
   5033   int clause_count = clauses->length();
   5034   ZoneList<HBasicBlock*> body_blocks(clause_count, zone());
   5035 
   5036   CHECK_ALIVE(VisitForValue(stmt->tag()));
   5037   Add<HSimulate>(stmt->EntryId());
   5038   HValue* tag_value = Top();
   5039   Type* tag_type = stmt->tag()->bounds().lower;
   5040 
   5041   // 1. Build all the tests, with dangling true branches
   5042   BailoutId default_id = BailoutId::None();
   5043   for (int i = 0; i < clause_count; ++i) {
   5044     CaseClause* clause = clauses->at(i);
   5045     if (clause->is_default()) {
   5046       body_blocks.Add(NULL, zone());
   5047       if (default_id.IsNone()) default_id = clause->EntryId();
   5048       continue;
   5049     }
   5050 
   5051     // Generate a compare and branch.
   5052     CHECK_BAILOUT(VisitForValue(clause->label()));
   5053     if (current_block() == NULL) return Bailout(kUnsupportedSwitchStatement);
   5054     HValue* label_value = Pop();
   5055 
   5056     Type* label_type = clause->label()->bounds().lower;
   5057     Type* combined_type = clause->compare_type();
   5058     HControlInstruction* compare = BuildCompareInstruction(
   5059         Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
   5060         combined_type,
   5061         ScriptPositionToSourcePosition(stmt->tag()->position()),
   5062         ScriptPositionToSourcePosition(clause->label()->position()),
   5063         PUSH_BEFORE_SIMULATE, clause->id());
   5064 
   5065     HBasicBlock* next_test_block = graph()->CreateBasicBlock();
   5066     HBasicBlock* body_block = graph()->CreateBasicBlock();
   5067     body_blocks.Add(body_block, zone());
   5068     compare->SetSuccessorAt(0, body_block);
   5069     compare->SetSuccessorAt(1, next_test_block);
   5070     FinishCurrentBlock(compare);
   5071 
   5072     set_current_block(body_block);
   5073     Drop(1);  // tag_value
   5074 
   5075     set_current_block(next_test_block);
   5076   }
   5077 
   5078   // Save the current block to use for the default or to join with the
   5079   // exit.
   5080   HBasicBlock* last_block = current_block();
   5081   Drop(1);  // tag_value
   5082 
   5083   // 2. Loop over the clauses and the linked list of tests in lockstep,
   5084   // translating the clause bodies.
   5085   HBasicBlock* fall_through_block = NULL;
   5086 
   5087   BreakAndContinueInfo break_info(stmt, scope());
   5088   { BreakAndContinueScope push(&break_info, this);
   5089     for (int i = 0; i < clause_count; ++i) {
   5090       CaseClause* clause = clauses->at(i);
   5091 
    5092       // Identify the block that normal (non-fall-through) control flow
    5093       // goes to.
   5094       HBasicBlock* normal_block = NULL;
   5095       if (clause->is_default()) {
   5096         if (last_block == NULL) continue;
   5097         normal_block = last_block;
   5098         last_block = NULL;  // Cleared to indicate we've handled it.
   5099       } else {
   5100         normal_block = body_blocks[i];
   5101       }
   5102 
   5103       if (fall_through_block == NULL) {
   5104         set_current_block(normal_block);
   5105       } else {
   5106         HBasicBlock* join = CreateJoin(fall_through_block,
   5107                                        normal_block,
   5108                                        clause->EntryId());
   5109         set_current_block(join);
   5110       }
   5111 
   5112       CHECK_BAILOUT(VisitStatements(clause->statements()));
   5113       fall_through_block = current_block();
   5114     }
   5115   }
   5116 
   5117   // Create an up-to-3-way join.  Use the break block if it exists since
   5118   // it's already a join block.
   5119   HBasicBlock* break_block = break_info.break_block();
   5120   if (break_block == NULL) {
   5121     set_current_block(CreateJoin(fall_through_block,
   5122                                  last_block,
   5123                                  stmt->ExitId()));
   5124   } else {
   5125     if (fall_through_block != NULL) Goto(fall_through_block, break_block);
   5126     if (last_block != NULL) Goto(last_block, break_block);
   5127     break_block->SetJoinId(stmt->ExitId());
   5128     set_current_block(break_block);
   5129   }
   5130 }
   5131 
   5132 
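         // Emits the simulate and the backwards-branch stack check for a loop body,
         // records the stack check on the loop header (so it can later be eliminated,
         // e.g. for a constant-false do-while condition), and then visits the body.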
   5133 void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
   5134                                            HBasicBlock* loop_entry) {
   5135   Add<HSimulate>(stmt->StackCheckId());
   5136   HStackCheck* stack_check =
   5137       HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
   5138   DCHECK(loop_entry->IsLoopHeader());
   5139   loop_entry->loop_information()->set_stack_check(stack_check);
   5140   CHECK_BAILOUT(Visit(stmt->body()));
   5141 }
   5142 
   5143 
   5144 void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
   5145   DCHECK(!HasStackOverflow());
   5146   DCHECK(current_block() != NULL);
   5147   DCHECK(current_block()->HasPredecessor());
   5148   DCHECK(current_block() != NULL);
   5149   HBasicBlock* loop_entry = BuildLoopEntry(stmt);
   5150 
   5151   BreakAndContinueInfo break_info(stmt, scope());
   5152   {
   5153     BreakAndContinueScope push(&break_info, this);
   5154     CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
   5155   }
   5156   HBasicBlock* body_exit =
   5157       JoinContinue(stmt, current_block(), break_info.continue_block());
   5158   HBasicBlock* loop_successor = NULL;
   5159   if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
   5160     set_current_block(body_exit);
   5161     loop_successor = graph()->CreateBasicBlock();
   5162     if (stmt->cond()->ToBooleanIsFalse()) {
   5163       loop_entry->loop_information()->stack_check()->Eliminate();
   5164       Goto(loop_successor);
   5165       body_exit = NULL;
   5166     } else {
   5167       // The block for a true condition, the actual predecessor block of the
   5168       // back edge.
   5169       body_exit = graph()->CreateBasicBlock();
   5170       CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
   5171     }
   5172     if (body_exit != NULL && body_exit->HasPredecessor()) {
   5173       body_exit->SetJoinId(stmt->BackEdgeId());
   5174     } else {
   5175       body_exit = NULL;
   5176     }
   5177     if (loop_successor->HasPredecessor()) {
   5178       loop_successor->SetJoinId(stmt->ExitId());
   5179     } else {
   5180       loop_successor = NULL;
   5181     }
   5182   }
   5183   HBasicBlock* loop_exit = CreateLoop(stmt,
   5184                                       loop_entry,
   5185                                       body_exit,
   5186                                       loop_successor,
   5187                                       break_info.break_block());
   5188   set_current_block(loop_exit);
   5189 }
   5190 
   5191 
   5192 void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
   5193   DCHECK(!HasStackOverflow());
   5194   DCHECK(current_block() != NULL);
   5195   DCHECK(current_block()->HasPredecessor());
   5196   DCHECK(current_block() != NULL);
   5197   HBasicBlock* loop_entry = BuildLoopEntry(stmt);
   5198 
   5199   // If the condition is constant true, do not generate a branch.
   5200   HBasicBlock* loop_successor = NULL;
   5201   if (!stmt->cond()->ToBooleanIsTrue()) {
   5202     HBasicBlock* body_entry = graph()->CreateBasicBlock();
   5203     loop_successor = graph()->CreateBasicBlock();
   5204     CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
   5205     if (body_entry->HasPredecessor()) {
   5206       body_entry->SetJoinId(stmt->BodyId());
   5207       set_current_block(body_entry);
   5208     }
   5209     if (loop_successor->HasPredecessor()) {
   5210       loop_successor->SetJoinId(stmt->ExitId());
   5211     } else {
   5212       loop_successor = NULL;
   5213     }
   5214   }
   5215 
   5216   BreakAndContinueInfo break_info(stmt, scope());
   5217   if (current_block() != NULL) {
   5218     BreakAndContinueScope push(&break_info, this);
   5219     CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
   5220   }
   5221   HBasicBlock* body_exit =
   5222       JoinContinue(stmt, current_block(), break_info.continue_block());
   5223   HBasicBlock* loop_exit = CreateLoop(stmt,
   5224                                       loop_entry,
   5225                                       body_exit,
   5226                                       loop_successor,
   5227                                       break_info.break_block());
   5228   set_current_block(loop_exit);
   5229 }
   5230 
   5231 
   5232 void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
   5233   DCHECK(!HasStackOverflow());
   5234   DCHECK(current_block() != NULL);
   5235   DCHECK(current_block()->HasPredecessor());
   5236   if (stmt->init() != NULL) {
   5237     CHECK_ALIVE(Visit(stmt->init()));
   5238   }
   5239   DCHECK(current_block() != NULL);
   5240   HBasicBlock* loop_entry = BuildLoopEntry(stmt);
   5241 
   5242   HBasicBlock* loop_successor = NULL;
   5243   if (stmt->cond() != NULL) {
   5244     HBasicBlock* body_entry = graph()->CreateBasicBlock();
   5245     loop_successor = graph()->CreateBasicBlock();
   5246     CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
   5247     if (body_entry->HasPredecessor()) {
   5248       body_entry->SetJoinId(stmt->BodyId());
   5249       set_current_block(body_entry);
   5250     }
   5251     if (loop_successor->HasPredecessor()) {
   5252       loop_successor->SetJoinId(stmt->ExitId());
   5253     } else {
   5254       loop_successor = NULL;
   5255     }
   5256   }
   5257 
   5258   BreakAndContinueInfo break_info(stmt, scope());
   5259   if (current_block() != NULL) {
   5260     BreakAndContinueScope push(&break_info, this);
   5261     CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
   5262   }
   5263   HBasicBlock* body_exit =
   5264       JoinContinue(stmt, current_block(), break_info.continue_block());
   5265 
   5266   if (stmt->next() != NULL && body_exit != NULL) {
   5267     set_current_block(body_exit);
   5268     CHECK_BAILOUT(Visit(stmt->next()));
   5269     body_exit = current_block();
   5270   }
   5271 
   5272   HBasicBlock* loop_exit = CreateLoop(stmt,
   5273                                       loop_entry,
   5274                                       body_exit,
   5275                                       loop_successor,
   5276                                       break_info.break_block());
   5277   set_current_block(loop_exit);
   5278 }
   5279 
   5280 
   5281 void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
   5282   DCHECK(!HasStackOverflow());
   5283   DCHECK(current_block() != NULL);
   5284   DCHECK(current_block()->HasPredecessor());
   5285 
   5286   if (!FLAG_optimize_for_in) {
   5287     return Bailout(kForInStatementOptimizationIsDisabled);
   5288   }
   5289 
   5290   if (!stmt->each()->IsVariableProxy() ||
   5291       !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
   5292     return Bailout(kForInStatementWithNonLocalEachVariable);
   5293   }
   5294 
   5295   Variable* each_var = stmt->each()->AsVariableProxy()->var();
   5296 
   5297   CHECK_ALIVE(VisitForValue(stmt->enumerable()));
   5298   HValue* enumerable = Top();  // Leave enumerable at the top.
   5299 
   5300   IfBuilder if_undefined_or_null(this);
   5301   if_undefined_or_null.If<HCompareObjectEqAndBranch>(
   5302       enumerable, graph()->GetConstantUndefined());
   5303   if_undefined_or_null.Or();
   5304   if_undefined_or_null.If<HCompareObjectEqAndBranch>(
   5305       enumerable, graph()->GetConstantNull());
   5306   if_undefined_or_null.ThenDeopt(Deoptimizer::kUndefinedOrNullInForIn);
   5307   if_undefined_or_null.End();
   5308   BuildForInBody(stmt, each_var, enumerable);
   5309 }
   5310 
   5311 
   5312 void HOptimizedGraphBuilder::BuildForInBody(ForInStatement* stmt,
   5313                                             Variable* each_var,
   5314                                             HValue* enumerable) {
   5315   HInstruction* map;
   5316   HInstruction* array;
   5317   HInstruction* enum_length;
   5318   bool fast = stmt->for_in_type() == ForInStatement::FAST_FOR_IN;
   5319   if (fast) {
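            // Fast case: the enumerable has a usable enum cache, so the key
            // array and its length can be read directly from its map's
            // descriptor array.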
   5320     map = Add<HForInPrepareMap>(enumerable);
   5321     Add<HSimulate>(stmt->PrepareId());
   5322 
   5323     array = Add<HForInCacheArray>(enumerable, map,
   5324                                   DescriptorArray::kEnumCacheBridgeCacheIndex);
   5325     enum_length = Add<HMapEnumLength>(map);
   5326 
   5327     HInstruction* index_cache = Add<HForInCacheArray>(
   5328         enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
   5329     HForInCacheArray::cast(array)
   5330         ->set_index_cache(HForInCacheArray::cast(index_cache));
   5331   } else {
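            // Slow case: get the property names from the runtime; each key is
            // additionally filtered through %ForInFilter in the loop body.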
   5332     Add<HSimulate>(stmt->PrepareId());
   5333     {
   5334       NoObservableSideEffectsScope no_effects(this);
   5335       BuildJSObjectCheck(enumerable, 0);
   5336     }
   5337     Add<HSimulate>(stmt->ToObjectId());
   5338 
   5339     map = graph()->GetConstant1();
   5340     Runtime::FunctionId function_id = Runtime::kGetPropertyNamesFast;
   5341     Add<HPushArguments>(enumerable);
   5342     array = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 1);
   5343     Push(array);
   5344     Add<HSimulate>(stmt->EnumId());
   5345     Drop(1);
   5346     Handle<Map> array_map = isolate()->factory()->fixed_array_map();
   5347     HValue* check = Add<HCheckMaps>(array, array_map);
   5348     enum_length = AddLoadFixedArrayLength(array, check);
   5349   }
   5350 
   5351   HInstruction* start_index = Add<HConstant>(0);
   5352 
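          // Keep the for-in state on the expression stack: together with the
          // enumerable pushed above, this gives (from the top) index, enum
          // length (the limit), key array, map and enumerable, i.e.
          // ExpressionStackAt(0) through ExpressionStackAt(4) inside the loop.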
   5353   Push(map);
   5354   Push(array);
   5355   Push(enum_length);
   5356   Push(start_index);
   5357 
   5358   HBasicBlock* loop_entry = BuildLoopEntry(stmt);
   5359 
   5360   // Reload the values to ensure we have up-to-date values inside the loop.
   5361   // This is especially relevant for OSR, where the values don't come from
   5362   // the computation above but from the OSR entry block.
   5363   enumerable = environment()->ExpressionStackAt(4);
   5364   HValue* index = environment()->ExpressionStackAt(0);
   5365   HValue* limit = environment()->ExpressionStackAt(1);
   5366 
   5367   // Check that we still have more keys.
   5368   HCompareNumericAndBranch* compare_index =
   5369       New<HCompareNumericAndBranch>(index, limit, Token::LT);
   5370   compare_index->set_observed_input_representation(
   5371       Representation::Smi(), Representation::Smi());
   5372 
   5373   HBasicBlock* loop_body = graph()->CreateBasicBlock();
   5374   HBasicBlock* loop_successor = graph()->CreateBasicBlock();
   5375 
   5376   compare_index->SetSuccessorAt(0, loop_body);
   5377   compare_index->SetSuccessorAt(1, loop_successor);
   5378   FinishCurrentBlock(compare_index);
   5379 
   5380   set_current_block(loop_successor);
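          // Leaving the loop: drop the five pieces of for-in state pushed above.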
   5381   Drop(5);
   5382 
   5383   set_current_block(loop_body);
   5384 
   5385   HValue* key =
   5386       Add<HLoadKeyed>(environment()->ExpressionStackAt(2),  // Enum cache.
   5387                       index, index, nullptr, FAST_ELEMENTS);
   5388 
   5389   if (fast) {
   5390     // Check if the expected map still matches that of the enumerable.
   5391     // If not, just deoptimize.
   5392     Add<HCheckMapValue>(enumerable, environment()->ExpressionStackAt(3));
   5393     Bind(each_var, key);
   5394   } else {
   5395     Add<HPushArguments>(enumerable, key);
   5396     Runtime::FunctionId function_id = Runtime::kForInFilter;
   5397     key = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 2);
   5398     Push(key);
   5399     Add<HSimulate>(stmt->FilterId());
   5400     key = Pop();
   5401     Bind(each_var, key);
   5402     IfBuilder if_undefined(this);
   5403     if_undefined.If<HCompareObjectEqAndBranch>(key,
   5404                                                graph()->GetConstantUndefined());
   5405     if_undefined.ThenDeopt(Deoptimizer::kUndefined);
   5406     if_undefined.End();
   5407     Add<HSimulate>(stmt->AssignmentId());
   5408   }
   5409 
   5410   BreakAndContinueInfo break_info(stmt, scope(), 5);
   5411   {
   5412     BreakAndContinueScope push(&break_info, this);
   5413     CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
   5414   }
   5415 
   5416   HBasicBlock* body_exit =
   5417       JoinContinue(stmt, current_block(), break_info.continue_block());
   5418 
   5419   if (body_exit != NULL) {
   5420     set_current_block(body_exit);
   5421 
   5422     HValue* current_index = Pop();
   5423     Push(AddUncasted<HAdd>(current_index, graph()->GetConstant1()));
   5424     body_exit = current_block();
   5425   }
   5426 
   5427   HBasicBlock* loop_exit = CreateLoop(stmt,
   5428                                       loop_entry,
   5429                                       body_exit,
   5430                                       loop_successor,
   5431                                       break_info.break_block());
   5432 
   5433   set_current_block(loop_exit);
   5434 }
   5435 
   5436 
   5437 void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
   5438   DCHECK(!HasStackOverflow());
   5439   DCHECK(current_block() != NULL);
   5440   DCHECK(current_block()->HasPredecessor());
   5441   return Bailout(kForOfStatement);
   5442 }
   5443 
   5444 
   5445 void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
   5446   DCHECK(!HasStackOverflow());
   5447   DCHECK(current_block() != NULL);
   5448   DCHECK(current_block()->HasPredecessor());
   5449   return Bailout(kTryCatchStatement);
   5450 }
   5451 
   5452 
   5453 void HOptimizedGraphBuilder::VisitTryFinallyStatement(
   5454     TryFinallyStatement* stmt) {
   5455   DCHECK(!HasStackOverflow());
   5456   DCHECK(current_block() != NULL);
   5457   DCHECK(current_block()->HasPredecessor());
   5458   return Bailout(kTryFinallyStatement);
   5459 }
   5460 
   5461 
   5462 void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
   5463   DCHECK(!HasStackOverflow());
   5464   DCHECK(current_block() != NULL);
   5465   DCHECK(current_block()->HasPredecessor());
   5466   return Bailout(kDebuggerStatement);
   5467 }
   5468 
   5469 
   5470 void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
   5471   UNREACHABLE();
   5472 }
   5473 
   5474 
   5475 void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
   5476   DCHECK(!HasStackOverflow());
   5477   DCHECK(current_block() != NULL);
   5478   DCHECK(current_block()->HasPredecessor());
   5479   Handle<SharedFunctionInfo> shared_info = Compiler::GetSharedFunctionInfo(
   5480       expr, current_info()->script(), top_info());
   5481   // We also have a stack overflow if the recursive compilation did.
   5482   if (HasStackOverflow()) return;
   5483   // Use the fast case closure allocation code that allocates in new
   5484   // space for nested functions that don't need literals cloning.
   5485   HConstant* shared_info_value = Add<HConstant>(shared_info);
   5486   HInstruction* instr;
   5487   if (!expr->pretenure() && shared_info->num_literals() == 0) {
   5488     FastNewClosureStub stub(isolate(), shared_info->language_mode(),
   5489                             shared_info->kind());
   5490     FastNewClosureDescriptor descriptor(isolate());
   5491     HValue* values[] = {context(), shared_info_value};
   5492     HConstant* stub_value = Add<HConstant>(stub.GetCode());
   5493     instr = New<HCallWithDescriptor>(stub_value, 0, descriptor,
   5494                                      Vector<HValue*>(values, arraysize(values)),
   5495                                      NORMAL_CALL);
   5496   } else {
   5497     Add<HPushArguments>(shared_info_value);
   5498     Runtime::FunctionId function_id =
   5499         expr->pretenure() ? Runtime::kNewClosure_Tenured : Runtime::kNewClosure;
   5500     instr = New<HCallRuntime>(Runtime::FunctionForId(function_id), 1);
   5501   }
   5502   return ast_context()->ReturnInstruction(instr, expr->id());
   5503 }
   5504 
   5505 
   5506 void HOptimizedGraphBuilder::VisitClassLiteral(ClassLiteral* lit) {
   5507   DCHECK(!HasStackOverflow());
   5508   DCHECK(current_block() != NULL);
   5509   DCHECK(current_block()->HasPredecessor());
   5510   return Bailout(kClassLiteral);
   5511 }
   5512 
   5513 
   5514 void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
   5515     NativeFunctionLiteral* expr) {
   5516   DCHECK(!HasStackOverflow());
   5517   DCHECK(current_block() != NULL);
   5518   DCHECK(current_block()->HasPredecessor());
   5519   return Bailout(kNativeFunctionLiteral);
   5520 }
   5521 
   5522 
   5523 void HOptimizedGraphBuilder::VisitDoExpression(DoExpression* expr) {
   5524   DCHECK(!HasStackOverflow());
   5525   DCHECK(current_block() != NULL);
   5526   DCHECK(current_block()->HasPredecessor());
   5527   return Bailout(kDoExpression);
   5528 }
   5529 
   5530 
   5531 void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
   5532   DCHECK(!HasStackOverflow());
   5533   DCHECK(current_block() != NULL);
   5534   DCHECK(current_block()->HasPredecessor());
   5535   HBasicBlock* cond_true = graph()->CreateBasicBlock();
   5536   HBasicBlock* cond_false = graph()->CreateBasicBlock();
   5537   CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
   5538 
   5539   // Visit the true and false subexpressions in the same AST context as the
   5540   // whole expression.
   5541   if (cond_true->HasPredecessor()) {
   5542     cond_true->SetJoinId(expr->ThenId());
   5543     set_current_block(cond_true);
   5544     CHECK_BAILOUT(Visit(expr->then_expression()));
   5545     cond_true = current_block();
   5546   } else {
   5547     cond_true = NULL;
   5548   }
   5549 
   5550   if (cond_false->HasPredecessor()) {
   5551     cond_false->SetJoinId(expr->ElseId());
   5552     set_current_block(cond_false);
   5553     CHECK_BAILOUT(Visit(expr->else_expression()));
   5554     cond_false = current_block();
   5555   } else {
   5556     cond_false = NULL;
   5557   }
   5558 
   5559   if (!ast_context()->IsTest()) {
   5560     HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
   5561     set_current_block(join);
   5562     if (join != NULL && !ast_context()->IsEffect()) {
   5563       return ast_context()->ReturnValue(Pop());
   5564     }
   5565   }
   5566 }
   5567 
   5568 
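        // Decides whether a load or store of a global variable can go directly
        // through the global's property cell (kUseCell) or has to use the
        // generic IC (kUseGeneric).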
   5569 HOptimizedGraphBuilder::GlobalPropertyAccess
   5570 HOptimizedGraphBuilder::LookupGlobalProperty(Variable* var, LookupIterator* it,
   5571                                              PropertyAccessType access_type) {
   5572   if (var->is_this() || !current_info()->has_global_object()) {
   5573     return kUseGeneric;
   5574   }
   5575 
   5576   switch (it->state()) {
   5577     case LookupIterator::ACCESSOR:
   5578     case LookupIterator::ACCESS_CHECK:
   5579     case LookupIterator::INTERCEPTOR:
   5580     case LookupIterator::INTEGER_INDEXED_EXOTIC:
   5581     case LookupIterator::NOT_FOUND:
   5582       return kUseGeneric;
   5583     case LookupIterator::DATA:
   5584       if (access_type == STORE && it->IsReadOnly()) return kUseGeneric;
   5585       return kUseCell;
   5586     case LookupIterator::JSPROXY:
   5587     case LookupIterator::TRANSITION:
   5588       UNREACHABLE();
   5589   }
   5590   UNREACHABLE();
   5591   return kUseGeneric;
   5592 }
   5593 
   5594 
   5595 HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
   5596   DCHECK(var->IsContextSlot());
   5597   HValue* context = environment()->context();
   5598   int length = scope()->ContextChainLength(var->scope());
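          // Walk up the context chain, following each context's PREVIOUS slot,
          // until the context holding the variable is reached.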
   5599   while (length-- > 0) {
   5600     context = Add<HLoadNamedField>(
   5601         context, nullptr,
   5602         HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
   5603   }
   5604   return context;
   5605 }
   5606 
   5607 
   5608 void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
   5609   DCHECK(!HasStackOverflow());
   5610   DCHECK(current_block() != NULL);
   5611   DCHECK(current_block()->HasPredecessor());
   5612   Variable* variable = expr->var();
   5613   switch (variable->location()) {
   5614     case VariableLocation::GLOBAL:
   5615     case VariableLocation::UNALLOCATED: {
   5616       if (IsLexicalVariableMode(variable->mode())) {
   5617         // TODO(rossberg): should this be a DCHECK?
   5618         return Bailout(kReferenceToGlobalLexicalVariable);
   5619       }
   5620       // Handle known global constants like 'undefined' specially to avoid a
   5621       // load from a global cell for them.
   5622       Handle<Object> constant_value =
   5623           isolate()->factory()->GlobalConstantFor(variable->name());
   5624       if (!constant_value.is_null()) {
   5625         HConstant* instr = New<HConstant>(constant_value);
   5626         return ast_context()->ReturnInstruction(instr, expr->id());
   5627       }
   5628 
   5629       Handle<JSGlobalObject> global(current_info()->global_object());
   5630 
   5631       // Lookup in script contexts.
   5632       {
   5633         Handle<ScriptContextTable> script_contexts(
   5634             global->native_context()->script_context_table());
   5635         ScriptContextTable::LookupResult lookup;
   5636         if (ScriptContextTable::Lookup(script_contexts, variable->name(),
   5637                                        &lookup)) {
   5638           Handle<Context> script_context = ScriptContextTable::GetContext(
   5639               script_contexts, lookup.context_index);
   5640           Handle<Object> current_value =
   5641               FixedArray::get(script_context, lookup.slot_index);
   5642 
   5643           // If the value is not the hole, it will stay initialized,
   5644           // so no need to generate a check.
   5645           if (*current_value == *isolate()->factory()->the_hole_value()) {
   5646             return Bailout(kReferenceToUninitializedVariable);
   5647           }
   5648           HInstruction* result = New<HLoadNamedField>(
   5649               Add<HConstant>(script_context), nullptr,
   5650               HObjectAccess::ForContextSlot(lookup.slot_index));
   5651           return ast_context()->ReturnInstruction(result, expr->id());
   5652         }
   5653       }
   5654 
   5655       LookupIterator it(global, variable->name(), LookupIterator::OWN);
   5656       GlobalPropertyAccess type = LookupGlobalProperty(variable, &it, LOAD);
   5657 
   5658       if (type == kUseCell) {
   5659         Handle<PropertyCell> cell = it.GetPropertyCell();
   5660         top_info()->dependencies()->AssumePropertyCell(cell);
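                // The AssumePropertyCell dependency above deoptimizes this code
                // if the cell's value or type changes, so constant cells can be
                // embedded directly and other cells loaded with a specialized
                // representation.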
   5661         auto cell_type = it.property_details().cell_type();
   5662         if (cell_type == PropertyCellType::kConstant ||
   5663             cell_type == PropertyCellType::kUndefined) {
   5664           Handle<Object> constant_object(cell->value(), isolate());
   5665           if (constant_object->IsConsString()) {
   5666             constant_object =
   5667                 String::Flatten(Handle<String>::cast(constant_object));
   5668           }
   5669           HConstant* constant = New<HConstant>(constant_object);
   5670           return ast_context()->ReturnInstruction(constant, expr->id());
   5671         } else {
   5672           auto access = HObjectAccess::ForPropertyCellValue();
   5673           UniqueSet<Map>* field_maps = nullptr;
   5674           if (cell_type == PropertyCellType::kConstantType) {
   5675             switch (cell->GetConstantType()) {
   5676               case PropertyCellConstantType::kSmi:
   5677                 access = access.WithRepresentation(Representation::Smi());
   5678                 break;
   5679               case PropertyCellConstantType::kStableMap: {
   5680                 // Check that the map really is stable. The heap object
   5681                 // could have mutated without the cell's state being
   5682                 // updated, in which case we make no promises about the
   5683                 // loaded value except that it is a heap object.
   5684                 access =
   5685                     access.WithRepresentation(Representation::HeapObject());
   5686                 Handle<Map> map(HeapObject::cast(cell->value())->map());
   5687                 if (map->is_stable()) {
   5688                   field_maps = new (zone())
   5689                       UniqueSet<Map>(Unique<Map>::CreateImmovable(map), zone());
   5690                 }
   5691                 break;
   5692               }
   5693             }
   5694           }
   5695           HConstant* cell_constant = Add<HConstant>(cell);
   5696           HLoadNamedField* instr;
   5697           if (field_maps == nullptr) {
   5698             instr = New<HLoadNamedField>(cell_constant, nullptr, access);
   5699           } else {
   5700             instr = New<HLoadNamedField>(cell_constant, nullptr, access,
   5701                                          field_maps, HType::HeapObject());
   5702           }
   5703           instr->ClearDependsOnFlag(kInobjectFields);
   5704           instr->SetDependsOnFlag(kGlobalVars);
   5705           return ast_context()->ReturnInstruction(instr, expr->id());
   5706         }
   5707       } else {
   5708         HValue* global_object = Add<HLoadNamedField>(
   5709             BuildGetNativeContext(), nullptr,
   5710             HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX));
   5711         HLoadGlobalGeneric* instr = New<HLoadGlobalGeneric>(
   5712             global_object, variable->name(), ast_context()->typeof_mode());
   5713         instr->SetVectorAndSlot(handle(current_feedback_vector(), isolate()),
   5714                                 expr->VariableFeedbackSlot());
   5715         return ast_context()->ReturnInstruction(instr, expr->id());
   5716       }
   5717     }
   5718 
   5719     case VariableLocation::PARAMETER:
   5720     case VariableLocation::LOCAL: {
   5721       HValue* value = LookupAndMakeLive(variable);
   5722       if (value == graph()->GetConstantHole()) {
   5723         DCHECK(IsDeclaredVariableMode(variable->mode()) &&
   5724                variable->mode() != VAR);
   5725         return Bailout(kReferenceToUninitializedVariable);
   5726       }
   5727       return ast_context()->ReturnValue(value);
   5728     }
   5729 
   5730     case VariableLocation::CONTEXT: {
   5731       HValue* context = BuildContextChainWalk(variable);
   5732       HLoadContextSlot::Mode mode;
   5733       switch (variable->mode()) {
   5734         case LET:
   5735         case CONST:
   5736           mode = HLoadContextSlot::kCheckDeoptimize;
   5737           break;
   5738         case CONST_LEGACY:
   5739           mode = HLoadContextSlot::kCheckReturnUndefined;
   5740           break;
   5741         default:
   5742           mode = HLoadContextSlot::kNoCheck;
   5743           break;
   5744       }
   5745       HLoadContextSlot* instr =
   5746           new(zone()) HLoadContextSlot(context, variable->index(), mode);
   5747       return ast_context()->ReturnInstruction(instr, expr->id());
   5748     }
   5749 
   5750     case VariableLocation::LOOKUP:
   5751       return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
   5752   }
   5753 }
   5754 
   5755 
   5756 void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
   5757   DCHECK(!HasStackOverflow());
   5758   DCHECK(current_block() != NULL);
   5759   DCHECK(current_block()->HasPredecessor());
   5760   HConstant* instr = New<HConstant>(expr->value());
   5761   return ast_context()->ReturnInstruction(instr, expr->id());
   5762 }
   5763 
   5764 
   5765 void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
   5766   DCHECK(!HasStackOverflow());
   5767   DCHECK(current_block() != NULL);
   5768   DCHECK(current_block()->HasPredecessor());
   5769   Callable callable = CodeFactory::FastCloneRegExp(isolate());
   5770   HValue* values[] = {
   5771       context(), AddThisFunction(), Add<HConstant>(expr->literal_index()),
   5772       Add<HConstant>(expr->pattern()), Add<HConstant>(expr->flags())};
   5773   HConstant* stub_value = Add<HConstant>(callable.code());
   5774   HInstruction* instr = New<HCallWithDescriptor>(
   5775       stub_value, 0, callable.descriptor(),
   5776       Vector<HValue*>(values, arraysize(values)), NORMAL_CALL);
   5777   return ast_context()->ReturnInstruction(instr, expr->id());
   5778 }
   5779 
   5780 
   5781 static bool CanInlinePropertyAccess(Handle<Map> map) {
   5782   if (map->instance_type() == HEAP_NUMBER_TYPE) return true;
   5783   if (map->instance_type() < FIRST_NONSTRING_TYPE) return true;
   5784   return map->IsJSObjectMap() && !map->is_dictionary_map() &&
   5785          !map->has_named_interceptor() &&
   5786          // TODO(verwaest): Whitelist contexts to which we have access.
   5787          !map->is_access_check_needed();
   5788 }
   5789 
   5790 
   5791 // Determines whether the given array or object literal boilerplate satisfies
   5792 // all limits to be considered for fast deep-copying, charging every element
   5793 // and data property in the boilerplate's object graph against *max_properties.
   5794 static bool IsFastLiteral(Handle<JSObject> boilerplate,
   5795                           int max_depth,
   5796                           int* max_properties) {
   5797   if (boilerplate->map()->is_deprecated() &&
   5798       !JSObject::TryMigrateInstance(boilerplate)) {
   5799     return false;
   5800   }
   5801 
   5802   DCHECK(max_depth >= 0 && *max_properties >= 0);
   5803   if (max_depth == 0) return false;
   5804 
   5805   Isolate* isolate = boilerplate->GetIsolate();
   5806   Handle<FixedArrayBase> elements(boilerplate->elements());
   5807   if (elements->length() > 0 &&
   5808       elements->map() != isolate->heap()->fixed_cow_array_map()) {
   5809     if (boilerplate->HasFastSmiOrObjectElements()) {
   5810       Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
   5811       int length = elements->length();
   5812       for (int i = 0; i < length; i++) {
   5813         if ((*max_properties)-- == 0) return false;
   5814         Handle<Object> value(fast_elements->get(i), isolate);
   5815         if (value->IsJSObject()) {
   5816           Handle<JSObject> value_object = Handle<JSObject>::cast(value);
   5817           if (!IsFastLiteral(value_object,
   5818                              max_depth - 1,
   5819                              max_properties)) {
   5820             return false;
   5821           }
   5822         }
   5823       }
   5824     } else if (!boilerplate->HasFastDoubleElements()) {
   5825       return false;
   5826     }
   5827   }
   5828 
   5829   Handle<FixedArray> properties(boilerplate->properties());
   5830   if (properties->length() > 0) {
   5831     return false;
   5832   } else {
   5833     Handle<DescriptorArray> descriptors(
   5834         boilerplate->map()->instance_descriptors());
   5835     int limit = boilerplate->map()->NumberOfOwnDescriptors();
   5836     for (int i = 0; i < limit; i++) {
   5837       PropertyDetails details = descriptors->GetDetails(i);
   5838       if (details.type() != DATA) continue;
   5839       if ((*max_properties)-- == 0) return false;
   5840       FieldIndex field_index = FieldIndex::ForDescriptor(boilerplate->map(), i);
   5841       if (boilerplate->IsUnboxedDoubleField(field_index)) continue;
   5842       Handle<Object> value(boilerplate->RawFastPropertyAt(field_index),
   5843                            isolate);
   5844       if (value->IsJSObject()) {
   5845         Handle<JSObject> value_object = Handle<JSObject>::cast(value);
   5846         if (!IsFastLiteral(value_object,
   5847                            max_depth - 1,
   5848                            max_properties)) {
   5849           return false;
   5850         }
   5851       }
   5852     }
   5853   }
   5854   return true;
   5855 }
   5856 
   5857 
   5858 void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
   5859   DCHECK(!HasStackOverflow());
   5860   DCHECK(current_block() != NULL);
   5861   DCHECK(current_block()->HasPredecessor());
   5862 
   5863   Handle<JSFunction> closure = function_state()->compilation_info()->closure();
   5864   HInstruction* literal;
   5865 
   5866   // Check whether to use fast or slow deep-copying for boilerplate.
   5867   int max_properties = kMaxFastLiteralProperties;
   5868   Handle<Object> literals_cell(
   5869       closure->literals()->literal(expr->literal_index()), isolate());
   5870   Handle<AllocationSite> site;
   5871   Handle<JSObject> boilerplate;
   5872   if (!literals_cell->IsUndefined()) {
   5873     // Retrieve the boilerplate from the allocation site.
   5874     site = Handle<AllocationSite>::cast(literals_cell);
   5875     boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
   5876                                    isolate());
   5877   }
   5878 
   5879   if (!boilerplate.is_null() &&
   5880       IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
   5881     AllocationSiteUsageContext site_context(isolate(), site, false);
   5882     site_context.EnterNewScope();
   5883     literal = BuildFastLiteral(boilerplate, &site_context);
   5884     site_context.ExitScope(site, boilerplate);
   5885   } else {
   5886     NoObservableSideEffectsScope no_effects(this);
   5887     Handle<FixedArray> constant_properties = expr->constant_properties();
   5888     int literal_index = expr->literal_index();
   5889     int flags = expr->ComputeFlags(true);
   5890 
   5891     Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index),
   5892                         Add<HConstant>(constant_properties),
   5893                         Add<HConstant>(flags));
   5894 
   5895     Runtime::FunctionId function_id = Runtime::kCreateObjectLiteral;
   5896     literal = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 4);
   5897   }
   5898 
   5899   // The object is expected in the bailout environment during computation
   5900   // of the property values and is the value of the entire expression.
   5901   Push(literal);
   5902   for (int i = 0; i < expr->properties()->length(); i++) {
   5903     ObjectLiteral::Property* property = expr->properties()->at(i);
   5904     if (property->is_computed_name()) return Bailout(kComputedPropertyName);
   5905     if (property->IsCompileTimeValue()) continue;
   5906 
   5907     Literal* key = property->key()->AsLiteral();
   5908     Expression* value = property->value();
   5909 
   5910     switch (property->kind()) {
   5911       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
   5912         DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
   5913         // Fall through.
   5914       case ObjectLiteral::Property::COMPUTED:
   5915         // It is safe to use [[Put]] here because the boilerplate already
   5916         // contains computed properties with an uninitialized value.
   5917         if (key->value()->IsInternalizedString()) {
   5918           if (property->emit_store()) {
   5919             CHECK_ALIVE(VisitForValue(value));
   5920             HValue* value = Pop();
   5921 
   5922             Handle<Map> map = property->GetReceiverType();
   5923             Handle<String> name = key->AsPropertyName();
   5924             HValue* store;
   5925             FeedbackVectorSlot slot = property->GetSlot();
   5926             if (map.is_null()) {
   5927               // If we don't know the monomorphic type, do a generic store.
   5928               CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot, literal,
   5929                                                     name, value));
   5930             } else {
   5931               PropertyAccessInfo info(this, STORE, map, name);
   5932               if (info.CanAccessMonomorphic()) {
   5933                 HValue* checked_literal = Add<HCheckMaps>(literal, map);
   5934                 DCHECK(!info.IsAccessorConstant());
   5935                 store = BuildMonomorphicAccess(
   5936                     &info, literal, checked_literal, value,
   5937                     BailoutId::None(), BailoutId::None());
   5938               } else {
   5939                 CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot,
   5940                                                       literal, name, value));
   5941               }
   5942             }
   5943             if (store->IsInstruction()) {
   5944               AddInstruction(HInstruction::cast(store));
   5945             }
   5946             DCHECK(store->HasObservableSideEffects());
   5947             Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);
   5948 
   5949             // Add [[HomeObject]] to function literals.
   5950             if (FunctionLiteral::NeedsHomeObject(property->value())) {
   5951               Handle<Symbol> sym = isolate()->factory()->home_object_symbol();
   5952               HInstruction* store_home = BuildNamedGeneric(
   5953                   STORE, NULL, property->GetSlot(1), value, sym, literal);
   5954               AddInstruction(store_home);
   5955               DCHECK(store_home->HasObservableSideEffects());
   5956               Add<HSimulate>(property->value()->id(), REMOVABLE_SIMULATE);
   5957             }
   5958           } else {
   5959             CHECK_ALIVE(VisitForEffect(value));
   5960           }
   5961           break;
   5962         }
   5963         // Fall through.
   5964       case ObjectLiteral::Property::PROTOTYPE:
   5965       case ObjectLiteral::Property::SETTER:
   5966       case ObjectLiteral::Property::GETTER:
   5967         return Bailout(kObjectLiteralWithComplexProperty);
   5968       default: UNREACHABLE();
   5969     }
   5970   }
   5971 
   5972   if (expr->has_function()) {
   5973     // Return the result of the transformation to fast properties
   5974     // instead of the original since this operation changes the map
   5975     // of the object. This makes sure that the original object won't
   5976     // be used by other optimized code before it is transformed
   5977     // (e.g. because of code motion).
   5978     HToFastProperties* result = Add<HToFastProperties>(Pop());
   5979     return ast_context()->ReturnValue(result);
   5980   } else {
   5981     return ast_context()->ReturnValue(Pop());
   5982   }
   5983 }
   5984 
   5985 
   5986 void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
   5987   DCHECK(!HasStackOverflow());
   5988   DCHECK(current_block() != NULL);
   5989   DCHECK(current_block()->HasPredecessor());
   5990   ZoneList<Expression*>* subexprs = expr->values();
   5991   int length = subexprs->length();
   5992   HInstruction* literal;
   5993 
   5994   Handle<AllocationSite> site;
   5995   Handle<LiteralsArray> literals(environment()->closure()->literals(),
   5996                                  isolate());
   5997   bool uninitialized = false;
   5998   Handle<Object> literals_cell(literals->literal(expr->literal_index()),
   5999                                isolate());
   6000   Handle<JSObject> boilerplate_object;
   6001   if (literals_cell->IsUndefined()) {
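            // No boilerplate exists yet; create it together with an
            // AllocationSite so later executions of this literal can use the
            // fast copy path.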
   6002     uninitialized = true;
   6003     Handle<Object> raw_boilerplate;
   6004     ASSIGN_RETURN_ON_EXCEPTION_VALUE(
   6005         isolate(), raw_boilerplate,
   6006         Runtime::CreateArrayLiteralBoilerplate(
   6007             isolate(), literals, expr->constant_elements(),
   6008             is_strong(function_language_mode())),
   6009         Bailout(kArrayBoilerplateCreationFailed));
   6010 
   6011     boilerplate_object = Handle<JSObject>::cast(raw_boilerplate);
   6012     AllocationSiteCreationContext creation_context(isolate());
   6013     site = creation_context.EnterNewScope();
   6014     if (JSObject::DeepWalk(boilerplate_object, &creation_context).is_null()) {
   6015       return Bailout(kArrayBoilerplateCreationFailed);
   6016     }
   6017     creation_context.ExitScope(site, boilerplate_object);
   6018     literals->set_literal(expr->literal_index(), *site);
   6019 
   6020     if (boilerplate_object->elements()->map() ==
   6021         isolate()->heap()->fixed_cow_array_map()) {
   6022       isolate()->counters()->cow_arrays_created_runtime()->Increment();
   6023     }
   6024   } else {
   6025     DCHECK(literals_cell->IsAllocationSite());
   6026     site = Handle<AllocationSite>::cast(literals_cell);
   6027     boilerplate_object = Handle<JSObject>(
   6028         JSObject::cast(site->transition_info()), isolate());
   6029   }
   6030 
   6031   DCHECK(!boilerplate_object.is_null());
   6032   DCHECK(site->SitePointsToLiteral());
   6033 
   6034   ElementsKind boilerplate_elements_kind =
   6035       boilerplate_object->GetElementsKind();
   6036 
   6037   // Check whether to use fast or slow deep-copying for boilerplate.
   6038   int max_properties = kMaxFastLiteralProperties;
   6039   if (IsFastLiteral(boilerplate_object,
   6040                     kMaxFastLiteralDepth,
   6041                     &max_properties)) {
   6042     AllocationSiteUsageContext site_context(isolate(), site, false);
   6043     site_context.EnterNewScope();
   6044     literal = BuildFastLiteral(boilerplate_object, &site_context);
   6045     site_context.ExitScope(site, boilerplate_object);
   6046   } else {
   6047     NoObservableSideEffectsScope no_effects(this);
   6048     // The boilerplate already exists and its constant elements are never
   6049     // accessed, so pass an empty fixed array to the runtime function instead.
   6050     Handle<FixedArray> constants = isolate()->factory()->empty_fixed_array();
   6051     int literal_index = expr->literal_index();
   6052     int flags = expr->ComputeFlags(true);
   6053 
   6054     Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index),
   6055                         Add<HConstant>(constants), Add<HConstant>(flags));
   6056 
   6057     Runtime::FunctionId function_id = Runtime::kCreateArrayLiteral;
   6058     literal = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 4);
   6059 
   6060     // Register to deopt if the boilerplate ElementsKind changes.
   6061     top_info()->dependencies()->AssumeTransitionStable(site);
   6062   }
   6063 
   6064   // The array is expected in the bailout environment during computation
   6065   // of the property values and is the value of the entire expression.
   6066   Push(literal);
   6067 
   6068   HInstruction* elements = NULL;
   6069 
   6070   for (int i = 0; i < length; i++) {
   6071     Expression* subexpr = subexprs->at(i);
   6072     if (subexpr->IsSpread()) {
   6073       return Bailout(kSpread);
   6074     }
   6075 
   6076     // If the subexpression is a literal or a simple materialized literal it
   6077     // is already set in the cloned array.
   6078     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
   6079 
   6080     CHECK_ALIVE(VisitForValue(subexpr));
   6081     HValue* value = Pop();
   6082     if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);
   6083 
   6084     elements = AddLoadElements(literal);
   6085 
   6086     HValue* key = Add<HConstant>(i);
   6087 
   6088     switch (boilerplate_elements_kind) {
   6089       case FAST_SMI_ELEMENTS:
   6090       case FAST_HOLEY_SMI_ELEMENTS:
   6091       case FAST_ELEMENTS:
   6092       case FAST_HOLEY_ELEMENTS:
   6093       case FAST_DOUBLE_ELEMENTS:
   6094       case FAST_HOLEY_DOUBLE_ELEMENTS: {
   6095         HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value, nullptr,
   6096                                               boilerplate_elements_kind);
   6097         instr->SetUninitialized(uninitialized);
   6098         break;
   6099       }
   6100       default:
   6101         UNREACHABLE();
   6102         break;
   6103     }
   6104 
   6105     Add<HSimulate>(expr->GetIdForElement(i));
   6106   }
   6107 
   6108   return ast_context()->ReturnValue(Pop());
   6109 }
   6110 
   6111 
   6112 HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
   6113                                                 Handle<Map> map) {
   6114   BuildCheckHeapObject(object);
   6115   return Add<HCheckMaps>(object, map);
   6116 }
   6117 
   6118 
   6119 HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
   6120     PropertyAccessInfo* info,
   6121     HValue* checked_object) {
   6122   // See if this is a load for an immutable property
   6123   if (checked_object->ActualValue()->IsConstant()) {
   6124     Handle<Object> object(
   6125         HConstant::cast(checked_object->ActualValue())->handle(isolate()));
   6126 
   6127     if (object->IsJSObject()) {
   6128       LookupIterator it(object, info->name(),
   6129                         LookupIterator::OWN_SKIP_INTERCEPTOR);
   6130       Handle<Object> value = JSReceiver::GetDataProperty(&it);
   6131       if (it.IsFound() && it.IsReadOnly() && !it.IsConfigurable()) {
   6132         return New<HConstant>(value);
   6133       }
   6134     }
   6135   }
   6136 
   6137   HObjectAccess access = info->access();
   6138   if (access.representation().IsDouble() &&
   6139       (!FLAG_unbox_double_fields || !access.IsInobject())) {
   6140     // Load the heap number.
   6141     checked_object = Add<HLoadNamedField>(
   6142         checked_object, nullptr,
   6143         access.WithRepresentation(Representation::Tagged()));
   6144     // Load the double value from it.
   6145     access = HObjectAccess::ForHeapNumberValue();
   6146   }
   6147 
   6148   SmallMapList* map_list = info->field_maps();
   6149   if (map_list->length() == 0) {
   6150     return New<HLoadNamedField>(checked_object, checked_object, access);
   6151   }
   6152 
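          // Stable maps are known for the field's value: attach them to the
          // load so that later map checks on the result can be eliminated.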
   6153   UniqueSet<Map>* maps = new(zone()) UniqueSet<Map>(map_list->length(), zone());
   6154   for (int i = 0; i < map_list->length(); ++i) {
   6155     maps->Add(Unique<Map>::CreateImmovable(map_list->at(i)), zone());
   6156   }
   6157   return New<HLoadNamedField>(
   6158       checked_object, checked_object, access, maps, info->field_type());
   6159 }
   6160 
   6161 
   6162 HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
   6163     PropertyAccessInfo* info,
   6164     HValue* checked_object,
   6165     HValue* value) {
   6166   bool transition_to_field = info->IsTransition();
   6167   // TODO(verwaest): Move this logic into PropertyAccessInfo.
   6168   HObjectAccess field_access = info->access();
   6169 
   6170   HStoreNamedField *instr;
   6171   if (field_access.representation().IsDouble() &&
   6172       (!FLAG_unbox_double_fields || !field_access.IsInobject())) {
   6173     HObjectAccess heap_number_access =
   6174         field_access.WithRepresentation(Representation::Tagged());
   6175     if (transition_to_field) {
   6176       // The store requires a mutable HeapNumber to be allocated.
   6177       NoObservableSideEffectsScope no_side_effects(this);
   6178       HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);
   6179 
   6180       // TODO(hpayer): Allocation site pretenuring support.
   6181       HInstruction* heap_number = Add<HAllocate>(heap_number_size,
   6182           HType::HeapObject(),
   6183           NOT_TENURED,
   6184           MUTABLE_HEAP_NUMBER_TYPE);
   6185       AddStoreMapConstant(
   6186           heap_number, isolate()->factory()->mutable_heap_number_map());
   6187       Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
   6188                             value);
   6189       instr = New<HStoreNamedField>(checked_object->ActualValue(),
   6190                                     heap_number_access,
   6191                                     heap_number);
   6192     } else {
   6193       // Already holds a HeapNumber; load the box and write its value field.
   6194       HInstruction* heap_number =
   6195           Add<HLoadNamedField>(checked_object, nullptr, heap_number_access);
   6196       instr = New<HStoreNamedField>(heap_number,
   6197                                     HObjectAccess::ForHeapNumberValue(),
   6198                                     value, STORE_TO_INITIALIZED_ENTRY);
   6199     }
   6200   } else {
   6201     if (field_access.representation().IsHeapObject()) {
   6202       BuildCheckHeapObject(value);
   6203     }
   6204 
   6205     if (!info->field_maps()->is_empty()) {
   6206       DCHECK(field_access.representation().IsHeapObject());
   6207       value = Add<HCheckMaps>(value, info->field_maps());
   6208     }
   6209 
   6210     // This is a normal store.
   6211     instr = New<HStoreNamedField>(
   6212         checked_object->ActualValue(), field_access, value,
   6213         transition_to_field ? INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY);
   6214   }
   6215 
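          // A transitioning store also installs the transition target map on
          // the receiver.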
   6216   if (transition_to_field) {
   6217     Handle<Map> transition(info->transition());
   6218     DCHECK(!transition->is_deprecated());
   6219     instr->SetTransition(Add<HConstant>(transition));
   6220   }
   6221   return instr;
   6222 }
   6223 
   6224 
   6225 bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
   6226     PropertyAccessInfo* info) {
   6227   if (!CanInlinePropertyAccess(map_)) return false;
   6228 
   6229   // Currently only handle Type::Number as a polymorphic case.
   6230   // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
   6231   // instruction.
   6232   if (IsNumberType()) return false;
   6233 
   6234   // Values are only compatible for monomorphic load if they all behave the same
   6235   // regarding value wrappers.
   6236   if (IsValueWrapped() != info->IsValueWrapped()) return false;
   6237 
   6238   if (!LookupDescriptor()) return false;
   6239 
   6240   if (!IsFound()) {
   6241     return (!info->IsFound() || info->has_holder()) &&
   6242            map()->prototype() == info->map()->prototype();
   6243   }
   6244 
   6245   // Mismatch if the other access info found the property in the prototype
   6246   // chain.
   6247   if (info->has_holder()) return false;
   6248 
   6249   if (IsAccessorConstant()) {
   6250     return accessor_.is_identical_to(info->accessor_) &&
   6251         api_holder_.is_identical_to(info->api_holder_);
   6252   }
   6253 
   6254   if (IsDataConstant()) {
   6255     return constant_.is_identical_to(info->constant_);
   6256   }
   6257 
   6258   DCHECK(IsData());
   6259   if (!info->IsData()) return false;
   6260 
   6261   Representation r = access_.representation();
   6262   if (IsLoad()) {
   6263     if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
   6264   } else {
   6265     if (!info->access_.representation().IsCompatibleForStore(r)) return false;
   6266   }
   6267   if (info->access_.offset() != access_.offset()) return false;
   6268   if (info->access_.IsInobject() != access_.IsInobject()) return false;
   6269   if (IsLoad()) {
   6270     if (field_maps_.is_empty()) {
   6271       info->field_maps_.Clear();
   6272     } else if (!info->field_maps_.is_empty()) {
   6273       for (int i = 0; i < field_maps_.length(); ++i) {
   6274         info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone());
   6275       }
   6276       info->field_maps_.Sort();
   6277     }
   6278   } else {
   6279     // We can only merge stores that agree on their field maps. The comparison
   6280     // below is safe, since we keep the field maps sorted.
   6281     if (field_maps_.length() != info->field_maps_.length()) return false;
   6282     for (int i = 0; i < field_maps_.length(); ++i) {
   6283       if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) {
   6284         return false;
   6285       }
   6286     }
   6287   }
   6288   info->GeneralizeRepresentation(r);
   6289   info->field_type_ = info->field_type_.Combine(field_type_);
   6290   return true;
   6291 }
   6292 
   6293 
   6294 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
   6295   if (!map_->IsJSObjectMap()) return true;
   6296   LookupDescriptor(*map_, *name_);
   6297   return LoadResult(map_);
   6298 }
   6299 
   6300 
   6301 bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
   6302   if (!IsLoad() && IsProperty() && IsReadOnly()) {
   6303     return false;
   6304   }
   6305 
   6306   if (IsData()) {
   6307     // Construct the object field access.
   6308     int index = GetLocalFieldIndexFromMap(map);
   6309     access_ = HObjectAccess::ForField(map, index, representation(), name_);
   6310 
   6311     // Load field map for heap objects.
   6312     return LoadFieldMaps(map);
   6313   } else if (IsAccessorConstant()) {
   6314     Handle<Object> accessors = GetAccessorsFromMap(map);
   6315     if (!accessors->IsAccessorPair()) return false;
   6316     Object* raw_accessor =
   6317         IsLoad() ? Handle<AccessorPair>::cast(accessors)->getter()
   6318                  : Handle<AccessorPair>::cast(accessors)->setter();
   6319     if (!raw_accessor->IsJSFunction()) return false;
   6320     Handle<JSFunction> accessor = handle(JSFunction::cast(raw_accessor));
   6321     if (accessor->shared()->IsApiFunction()) {
   6322       CallOptimization call_optimization(accessor);
   6323       if (call_optimization.is_simple_api_call()) {
   6324         CallOptimization::HolderLookup holder_lookup;
   6325         api_holder_ =
   6326             call_optimization.LookupHolderOfExpectedType(map_, &holder_lookup);
   6327       }
   6328     }
   6329     accessor_ = accessor;
   6330   } else if (IsDataConstant()) {
   6331     constant_ = GetConstantFromMap(map);
   6332   }
   6333 
   6334   return true;
   6335 }
   6336 
   6337 
   6338 bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps(
   6339     Handle<Map> map) {
   6340   // Clear any previously collected field maps/type.
   6341   field_maps_.Clear();
   6342   field_type_ = HType::Tagged();
   6343 
   6344   // Figure out the field type from the accessor map.
   6345   Handle<HeapType> field_type = GetFieldTypeFromMap(map);
   6346 
   6347   // Collect the (stable) maps from the field type.
   6348   int num_field_maps = field_type->NumClasses();
   6349   if (num_field_maps > 0) {
   6350     DCHECK(access_.representation().IsHeapObject());
   6351     field_maps_.Reserve(num_field_maps, zone());
   6352     HeapType::Iterator<Map> it = field_type->Classes();
   6353     while (!it.Done()) {
   6354       Handle<Map> field_map = it.Current();
   6355       if (!field_map->is_stable()) {
   6356         field_maps_.Clear();
   6357         break;
   6358       }
   6359       field_maps_.Add(field_map, zone());
   6360       it.Advance();
   6361     }
   6362   }
   6363 
   6364   if (field_maps_.is_empty()) {
   6365     // Store is not safe if the field map was cleared.
   6366     return IsLoad() || !field_type->Is(HeapType::None());
   6367   }
   6368 
   6369   field_maps_.Sort();
   6370   DCHECK_EQ(num_field_maps, field_maps_.length());
   6371 
   6372   // Determine field HType from field HeapType.
   6373   field_type_ = HType::FromType<HeapType>(field_type);
   6374   DCHECK(field_type_.IsHeapObject());
   6375 
   6376   // Add dependency on the map that introduced the field.
   6377   top_info()->dependencies()->AssumeFieldType(GetFieldOwnerFromMap(map));
   6378   return true;
   6379 }
   6380 
   6381 
   6382 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
   6383   Handle<Map> map = this->map();
   6384 
   6385   while (map->prototype()->IsJSObject()) {
   6386     holder_ = handle(JSObject::cast(map->prototype()));
   6387     if (holder_->map()->is_deprecated()) {
   6388       JSObject::TryMigrateInstance(holder_);
   6389     }
   6390     map = Handle<Map>(holder_->map());
   6391     if (!CanInlinePropertyAccess(map)) {
   6392       NotFound();
   6393       return false;
   6394     }
   6395     LookupDescriptor(*map, *name_);
   6396     if (IsFound()) return LoadResult(map);
   6397   }
   6398 
   6399   NotFound();
   6400   return !map->prototype()->IsJSReceiver();
   6401 }
   6402 
   6403 
   6404 bool HOptimizedGraphBuilder::PropertyAccessInfo::IsIntegerIndexedExotic() {
   6405   InstanceType instance_type = map_->instance_type();
   6406   return instance_type == JS_TYPED_ARRAY_TYPE && name_->IsString() &&
   6407          IsSpecialIndex(isolate()->unicode_cache(), String::cast(*name_));
   6408 }
   6409 
   6410 
   6411 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
   6412   if (!CanInlinePropertyAccess(map_)) return false;
   6413   if (IsJSObjectFieldAccessor()) return IsLoad();
   6414   if (IsJSArrayBufferViewFieldAccessor()) return IsLoad();
   6415   if (map_->IsJSFunctionMap() && map_->is_constructor() &&
   6416       !map_->has_non_instance_prototype() &&
   6417       name_.is_identical_to(isolate()->factory()->prototype_string())) {
   6418     return IsLoad();
   6419   }
   6420   if (!LookupDescriptor()) return false;
   6421   if (IsFound()) return IsLoad() || !IsReadOnly();
   6422   if (IsIntegerIndexedExotic()) return false;
   6423   if (!LookupInPrototypes()) return false;
   6424   if (IsLoad()) return true;
   6425 
   6426   if (IsAccessorConstant()) return true;
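          // For a store of a not-yet-present property, look for a transition to
          // a data field that still has room left in the object.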
   6427   LookupTransition(*map_, *name_, NONE);
   6428   if (IsTransitionToData() && map_->unused_property_fields() > 0) {
   6429     // Construct the object field access.
   6430     int descriptor = transition()->LastAdded();
   6431     int index =
   6432         transition()->instance_descriptors()->GetFieldIndex(descriptor) -
   6433         map_->GetInObjectProperties();
   6434     PropertyDetails details =
   6435         transition()->instance_descriptors()->GetDetails(descriptor);
   6436     Representation representation = details.representation();
   6437     access_ = HObjectAccess::ForField(map_, index, representation, name_);
   6438 
   6439     // Load field map for heap objects.
   6440     return LoadFieldMaps(transition());
   6441   }
   6442   return false;
   6443 }
   6444 
   6445 
   6446 bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
   6447     SmallMapList* maps) {
   6448   DCHECK(map_.is_identical_to(maps->first()));
   6449   if (!CanAccessMonomorphic()) return false;
   6450   STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
   6451   if (maps->length() > kMaxLoadPolymorphism) return false;
   6452   HObjectAccess access = HObjectAccess::ForMap();  // bogus default
   6453   if (GetJSObjectFieldAccess(&access)) {
   6454     for (int i = 1; i < maps->length(); ++i) {
   6455       PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
   6456       HObjectAccess test_access = HObjectAccess::ForMap();  // bogus default
   6457       if (!test_info.GetJSObjectFieldAccess(&test_access)) return false;
   6458       if (!access.Equals(test_access)) return false;
   6459     }
   6460     return true;
   6461   }
   6462   if (GetJSArrayBufferViewFieldAccess(&access)) {
   6463     for (int i = 1; i < maps->length(); ++i) {
   6464       PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
   6465       HObjectAccess test_access = HObjectAccess::ForMap();  // bogus default
   6466       if (!test_info.GetJSArrayBufferViewFieldAccess(&test_access)) {
   6467         return false;
   6468       }
   6469       if (!access.Equals(test_access)) return false;
   6470     }
   6471     return true;
   6472   }
   6473 
   6474   // Currently only handle numbers as a polymorphic case.
   6475   // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
   6476   // instruction.
   6477   if (IsNumberType()) return false;
   6478 
   6479   // Multiple maps cannot transition to the same target map.
   6480   DCHECK(!IsLoad() || !IsTransition());
   6481   if (IsTransition() && maps->length() > 1) return false;
   6482 
   6483   for (int i = 1; i < maps->length(); ++i) {
   6484     PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
   6485     if (!test_info.IsCompatible(this)) return false;
   6486   }
   6487 
   6488   return true;
   6489 }
   6490 
   6491 
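        // For primitive receivers, property accesses are modeled on the initial map
        // of the corresponding wrapper constructor; JSObject maps are used as-is.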
   6492 Handle<Map> HOptimizedGraphBuilder::PropertyAccessInfo::map() {
   6493   Handle<JSFunction> ctor;
   6494   if (Map::GetConstructorFunction(
   6495           map_, handle(current_info()->closure()->context()->native_context()))
   6496           .ToHandle(&ctor)) {
   6497     return handle(ctor->initial_map());
   6498   }
   6499   return map_;
   6500 }
   6501 
   6502 
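        // Calls to sloppy-mode, non-native functions expect a JSObject receiver, so
        // primitive receivers have to be wrapped before such a call.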
   6503 static bool NeedsWrapping(Handle<Map> map, Handle<JSFunction> target) {
   6504   return !map->IsJSObjectMap() &&
   6505          is_sloppy(target->shared()->language_mode()) &&
   6506          !target->shared()->native();
   6507 }
   6508 
   6509 
   6510 bool HOptimizedGraphBuilder::PropertyAccessInfo::NeedsWrappingFor(
   6511     Handle<JSFunction> target) const {
   6512   return NeedsWrapping(map_, target);
   6513 }
   6514 
   6515 
   6516 HValue* HOptimizedGraphBuilder::BuildMonomorphicAccess(
   6517     PropertyAccessInfo* info, HValue* object, HValue* checked_object,
   6518     HValue* value, BailoutId ast_id, BailoutId return_id,
   6519     bool can_inline_accessor) {
   6520   HObjectAccess access = HObjectAccess::ForMap();  // bogus default
   6521   if (info->GetJSObjectFieldAccess(&access)) {
   6522     DCHECK(info->IsLoad());
   6523     return New<HLoadNamedField>(object, checked_object, access);
   6524   }
   6525 
   6526   if (info->GetJSArrayBufferViewFieldAccess(&access)) {
   6527     DCHECK(info->IsLoad());
   6528     checked_object = Add<HCheckArrayBufferNotNeutered>(checked_object);
   6529     return New<HLoadNamedField>(object, checked_object, access);
   6530   }
   6531 
   6532   if (info->name().is_identical_to(isolate()->factory()->prototype_string()) &&
   6533       info->map()->IsJSFunctionMap() && info->map()->is_constructor()) {
   6534     DCHECK(!info->map()->has_non_instance_prototype());
   6535     return New<HLoadFunctionPrototype>(checked_object);
   6536   }
   6537 
   6538   HValue* checked_holder = checked_object;
   6539   if (info->has_holder()) {
   6540     Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
   6541     checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
   6542   }
   6543 
   6544   if (!info->IsFound()) {
   6545     DCHECK(info->IsLoad());
   6546     if (is_strong(function_language_mode())) {
   6547       return New<HCallRuntime>(
   6548           Runtime::FunctionForId(Runtime::kThrowStrongModeImplicitConversion),
   6549           0);
   6550     } else {
   6551       return graph()->GetConstantUndefined();
   6552     }
   6553   }
   6554 
   6555   if (info->IsData()) {
   6556     if (info->IsLoad()) {
   6557       return BuildLoadNamedField(info, checked_holder);
   6558     } else {
   6559       return BuildStoreNamedField(info, checked_object, value);
   6560     }
   6561   }
   6562 
   6563   if (info->IsTransition()) {
   6564     DCHECK(!info->IsLoad());
   6565     return BuildStoreNamedField(info, checked_object, value);
   6566   }
   6567 
   6568   if (info->IsAccessorConstant()) {
   6569     Push(checked_object);
   6570     int argument_count = 1;
   6571     if (!info->IsLoad()) {
   6572       argument_count = 2;
   6573       Push(value);
   6574     }
   6575 
   6576     if (info->NeedsWrappingFor(info->accessor())) {
   6577       HValue* function = Add<HConstant>(info->accessor());
   6578       PushArgumentsFromEnvironment(argument_count);
   6579       return New<HCallFunction>(function, argument_count,
   6580                                 ConvertReceiverMode::kNotNullOrUndefined);
   6581     } else if (FLAG_inline_accessors && can_inline_accessor) {
   6582       bool success = info->IsLoad()
   6583           ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
   6584           : TryInlineSetter(
   6585               info->accessor(), info->map(), ast_id, return_id, value);
   6586       if (success || HasStackOverflow()) return NULL;
   6587     }
   6588 
   6589     PushArgumentsFromEnvironment(argument_count);
   6590     return BuildCallConstantFunction(info->accessor(), argument_count);
   6591   }
   6592 
   6593   DCHECK(info->IsDataConstant());
   6594   if (info->IsLoad()) {
   6595     return New<HConstant>(info->constant());
   6596   } else {
   6597     return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant()));
   6598   }
   6599 }
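        // BuildMonomorphicAccess() above handles, in order: the special
        // JSObject/JSArrayBufferView fields, the "prototype" slot of
        // constructor functions, not-found loads (undefined, or a runtime
        // throw under strong mode), plain data fields, transitioning stores,
        // accessors (a wrapped HCallFunction, an inlined getter/setter, or a
        // direct call to the constant accessor), and finally data constants
        // (HConstant on load, HCheckValue on store).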
   6600 
   6601 
   6602 void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
   6603     PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
   6604     BailoutId ast_id, BailoutId return_id, HValue* object, HValue* value,
   6605     SmallMapList* maps, Handle<Name> name) {
   6606   // Something did not match; must use a polymorphic load.
   6607   int count = 0;
   6608   HBasicBlock* join = NULL;
   6609   HBasicBlock* number_block = NULL;
   6610   bool handled_string = false;
   6611 
   6612   bool handle_smi = false;
   6613   STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
   6614   int i;
   6615   for (i = 0; i < maps->length() && count < kMaxLoadPolymorphism; ++i) {
   6616     PropertyAccessInfo info(this, access_type, maps->at(i), name);
   6617     if (info.IsStringType()) {
   6618       if (handled_string) continue;
   6619       handled_string = true;
   6620     }
   6621     if (info.CanAccessMonomorphic()) {
   6622       count++;
   6623       if (info.IsNumberType()) {
   6624         handle_smi = true;
   6625         break;
   6626       }
   6627     }
   6628   }
   6629 
   6630   if (i < maps->length()) {
   6631     count = -1;
   6632     maps->Clear();
   6633   } else {
   6634     count = 0;
   6635   }
   6636   HControlInstruction* smi_check = NULL;
   6637   handled_string = false;
   6638 
   6639   for (i = 0; i < maps->length() && count < kMaxLoadPolymorphism; ++i) {
   6640     PropertyAccessInfo info(this, access_type, maps->at(i), name);
   6641     if (info.IsStringType()) {
   6642       if (handled_string) continue;
   6643       handled_string = true;
   6644     }
   6645     if (!info.CanAccessMonomorphic()) continue;
   6646 
   6647     if (count == 0) {
   6648       join = graph()->CreateBasicBlock();
   6649       if (handle_smi) {
   6650         HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
   6651         HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
   6652         number_block = graph()->CreateBasicBlock();
   6653         smi_check = New<HIsSmiAndBranch>(
   6654             object, empty_smi_block, not_smi_block);
   6655         FinishCurrentBlock(smi_check);
   6656         GotoNoSimulate(empty_smi_block, number_block);
   6657         set_current_block(not_smi_block);
   6658       } else {
   6659         BuildCheckHeapObject(object);
   6660       }
   6661     }
   6662     ++count;
   6663     HBasicBlock* if_true = graph()->CreateBasicBlock();
   6664     HBasicBlock* if_false = graph()->CreateBasicBlock();
   6665     HUnaryControlInstruction* compare;
   6666 
   6667     HValue* dependency;
   6668     if (info.IsNumberType()) {
   6669       Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
   6670       compare = New<HCompareMap>(object, heap_number_map, if_true, if_false);
   6671       dependency = smi_check;
   6672     } else if (info.IsStringType()) {
   6673       compare = New<HIsStringAndBranch>(object, if_true, if_false);
   6674       dependency = compare;
   6675     } else {
   6676       compare = New<HCompareMap>(object, info.map(), if_true, if_false);
   6677       dependency = compare;
   6678     }
   6679     FinishCurrentBlock(compare);
   6680 
   6681     if (info.IsNumberType()) {
   6682       GotoNoSimulate(if_true, number_block);
   6683       if_true = number_block;
   6684     }
   6685 
   6686     set_current_block(if_true);
   6687 
   6688     HValue* access =
   6689         BuildMonomorphicAccess(&info, object, dependency, value, ast_id,
   6690                                return_id, FLAG_polymorphic_inlining);
   6691 
   6692     HValue* result = NULL;
   6693     switch (access_type) {
   6694       case LOAD:
   6695         result = access;
   6696         break;
   6697       case STORE:
   6698         result = value;
   6699         break;
   6700     }
   6701 
   6702     if (access == NULL) {
   6703       if (HasStackOverflow()) return;
   6704     } else {
   6705       if (access->IsInstruction()) {
   6706         HInstruction* instr = HInstruction::cast(access);
   6707         if (!instr->IsLinked()) AddInstruction(instr);
   6708       }
   6709       if (!ast_context()->IsEffect()) Push(result);
   6710     }
   6711 
   6712     if (current_block() != NULL) Goto(join);
   6713     set_current_block(if_false);
   6714   }
   6715 
   6716   // Finish up.  Unconditionally deoptimize if we've handled all the maps we
   6717   // know about and do not want to handle ones we've never seen.  Otherwise
   6718   // use a generic IC.
   6719   if (count == maps->length() && FLAG_deoptimize_uncommon_cases) {
   6720     FinishExitWithHardDeoptimization(
   6721         Deoptimizer::kUnknownMapInPolymorphicAccess);
   6722   } else {
   6723     HInstruction* instr =
   6724         BuildNamedGeneric(access_type, expr, slot, object, name, value);
   6725     AddInstruction(instr);
   6726     if (!ast_context()->IsEffect()) Push(access_type == LOAD ? instr : value);
   6727 
   6728     if (join != NULL) {
   6729       Goto(join);
   6730     } else {
   6731       Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
   6732       if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
   6733       return;
   6734     }
   6735   }
   6736 
   6737   DCHECK(join != NULL);
   6738   if (join->HasPredecessor()) {
   6739     join->SetJoinId(ast_id);
   6740     set_current_block(join);
   6741     if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
   6742   } else {
   6743     set_current_block(NULL);
   6744   }
   6745 }
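        // The handler above first scans the maps to find out which of them can
        // be handled monomorphically and whether a heap-number case is among
        // them, then emits one branch per handled map (HCompareMap for
        // ordinary maps, HIsStringAndBranch for strings, and a Smi check
        // feeding a shared heap-number block), with all handled cases meeting
        // at join. Whatever remains either goes through a generic named access
        // or, once every known map is covered, ends in a hard deoptimization.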
   6746 
   6747 
   6748 static bool ComputeReceiverTypes(Expression* expr,
   6749                                  HValue* receiver,
   6750                                  SmallMapList** t,
   6751                                  Zone* zone) {
   6752   SmallMapList* maps = expr->GetReceiverTypes();
   6753   *t = maps;
   6754   bool monomorphic = expr->IsMonomorphic();
   6755   if (maps != NULL && receiver->HasMonomorphicJSObjectType()) {
   6756     Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
   6757     maps->FilterForPossibleTransitions(root_map);
   6758     monomorphic = maps->length() == 1;
   6759   }
   6760   return monomorphic && CanInlinePropertyAccess(maps->first());
   6761 }
   6762 
   6763 
   6764 static bool AreStringTypes(SmallMapList* maps) {
   6765   for (int i = 0; i < maps->length(); i++) {
   6766     if (maps->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
   6767   }
   6768   return true;
   6769 }
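        // AreStringTypes() is used by BuildNamedAccess() below: when every
        // receiver map is a string map, a single
        // HCheckInstanceType(IS_STRING) can stand in for the usual HCheckMaps
        // over the whole map list.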
   6770 
   6771 
   6772 void HOptimizedGraphBuilder::BuildStore(Expression* expr, Property* prop,
   6773                                         FeedbackVectorSlot slot,
   6774                                         BailoutId ast_id, BailoutId return_id,
   6775                                         bool is_uninitialized) {
   6776   if (!prop->key()->IsPropertyName()) {
   6777     // Keyed store.
   6778     HValue* value = Pop();
   6779     HValue* key = Pop();
   6780     HValue* object = Pop();
   6781     bool has_side_effects = false;
   6782     HValue* result =
   6783         HandleKeyedElementAccess(object, key, value, expr, slot, ast_id,
   6784                                  return_id, STORE, &has_side_effects);
   6785     if (has_side_effects) {
   6786       if (!ast_context()->IsEffect()) Push(value);
   6787       Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
   6788       if (!ast_context()->IsEffect()) Drop(1);
   6789     }
   6790     if (result == NULL) return;
   6791     return ast_context()->ReturnValue(value);
   6792   }
   6793 
   6794   // Named store.
   6795   HValue* value = Pop();
   6796   HValue* object = Pop();
   6797 
   6798   Literal* key = prop->key()->AsLiteral();
   6799   Handle<String> name = Handle<String>::cast(key->value());
   6800   DCHECK(!name.is_null());
   6801 
   6802   HValue* access = BuildNamedAccess(STORE, ast_id, return_id, expr, slot,
   6803                                     object, name, value, is_uninitialized);
   6804   if (access == NULL) return;
   6805 
   6806   if (!ast_context()->IsEffect()) Push(value);
   6807   if (access->IsInstruction()) AddInstruction(HInstruction::cast(access));
   6808   if (access->HasObservableSideEffects()) {
   6809     Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
   6810   }
   6811   if (!ast_context()->IsEffect()) Drop(1);
   6812   return ast_context()->ReturnValue(value);
   6813 }
   6814 
   6815 
   6816 void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
   6817   Property* prop = expr->target()->AsProperty();
   6818   DCHECK(prop != NULL);
   6819   CHECK_ALIVE(VisitForValue(prop->obj()));
   6820   if (!prop->key()->IsPropertyName()) {
   6821     CHECK_ALIVE(VisitForValue(prop->key()));
   6822   }
   6823   CHECK_ALIVE(VisitForValue(expr->value()));
   6824   BuildStore(expr, prop, expr->AssignmentSlot(), expr->id(),
   6825              expr->AssignmentId(), expr->IsUninitialized());
   6826 }
   6827 
   6828 
   6829 // Because not every expression has a position and there is no common
   6830 // superclass of Assignment and CountOperation, we cannot just pass the
   6831 // owning expression instead of position and ast_id separately.
   6832 void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
   6833     Variable* var, HValue* value, FeedbackVectorSlot slot, BailoutId ast_id) {
   6834   Handle<JSGlobalObject> global(current_info()->global_object());
   6835 
   6836   // Lookup in script contexts.
   6837   {
   6838     Handle<ScriptContextTable> script_contexts(
   6839         global->native_context()->script_context_table());
   6840     ScriptContextTable::LookupResult lookup;
   6841     if (ScriptContextTable::Lookup(script_contexts, var->name(), &lookup)) {
   6842       if (lookup.mode == CONST) {
   6843         return Bailout(kNonInitializerAssignmentToConst);
   6844       }
   6845       Handle<Context> script_context =
   6846           ScriptContextTable::GetContext(script_contexts, lookup.context_index);
   6847 
   6848       Handle<Object> current_value =
   6849           FixedArray::get(script_context, lookup.slot_index);
   6850 
   6851       // If the value is not the hole, it will stay initialized,
   6852       // so no need to generate a check.
   6853       if (*current_value == *isolate()->factory()->the_hole_value()) {
   6854         return Bailout(kReferenceToUninitializedVariable);
   6855       }
   6856 
   6857       HStoreNamedField* instr = Add<HStoreNamedField>(
   6858           Add<HConstant>(script_context),
   6859           HObjectAccess::ForContextSlot(lookup.slot_index), value);
   6860       USE(instr);
   6861       DCHECK(instr->HasObservableSideEffects());
   6862       Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
   6863       return;
   6864     }
   6865   }
   6866 
   6867   LookupIterator it(global, var->name(), LookupIterator::OWN);
   6868   GlobalPropertyAccess type = LookupGlobalProperty(var, &it, STORE);
   6869   if (type == kUseCell) {
   6870     Handle<PropertyCell> cell = it.GetPropertyCell();
   6871     top_info()->dependencies()->AssumePropertyCell(cell);
   6872     auto cell_type = it.property_details().cell_type();
   6873     if (cell_type == PropertyCellType::kConstant ||
   6874         cell_type == PropertyCellType::kUndefined) {
   6875       Handle<Object> constant(cell->value(), isolate());
   6876       if (value->IsConstant()) {
   6877         HConstant* c_value = HConstant::cast(value);
   6878         if (!constant.is_identical_to(c_value->handle(isolate()))) {
   6879           Add<HDeoptimize>(Deoptimizer::kConstantGlobalVariableAssignment,
   6880                            Deoptimizer::EAGER);
   6881         }
   6882       } else {
   6883         HValue* c_constant = Add<HConstant>(constant);
   6884         IfBuilder builder(this);
   6885         if (constant->IsNumber()) {
   6886           builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
   6887         } else {
   6888           builder.If<HCompareObjectEqAndBranch>(value, c_constant);
   6889         }
   6890         builder.Then();
   6891         builder.Else();
   6892         Add<HDeoptimize>(Deoptimizer::kConstantGlobalVariableAssignment,
   6893                          Deoptimizer::EAGER);
   6894         builder.End();
   6895       }
   6896     }
   6897     HConstant* cell_constant = Add<HConstant>(cell);
   6898     auto access = HObjectAccess::ForPropertyCellValue();
   6899     if (cell_type == PropertyCellType::kConstantType) {
   6900       switch (cell->GetConstantType()) {
   6901         case PropertyCellConstantType::kSmi:
   6902           access = access.WithRepresentation(Representation::Smi());
   6903           break;
   6904         case PropertyCellConstantType::kStableMap: {
   6905           // The map may no longer be stable; deopt if it's ever different from
   6906           // what is currently there, which will allow for restabilization.
   6907           Handle<Map> map(HeapObject::cast(cell->value())->map());
   6908           Add<HCheckHeapObject>(value);
   6909           value = Add<HCheckMaps>(value, map);
   6910           access = access.WithRepresentation(Representation::HeapObject());
   6911           break;
   6912         }
   6913       }
   6914     }
   6915     HInstruction* instr = Add<HStoreNamedField>(cell_constant, access, value);
   6916     instr->ClearChangesFlag(kInobjectFields);
   6917     instr->SetChangesFlag(kGlobalVars);
   6918     if (instr->HasObservableSideEffects()) {
   6919       Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
   6920     }
   6921   } else {
   6922     HValue* global_object = Add<HLoadNamedField>(
   6923         BuildGetNativeContext(), nullptr,
   6924         HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX));
   6925     HStoreNamedGeneric* instr =
   6926         Add<HStoreNamedGeneric>(global_object, var->name(), value,
   6927                                 function_language_mode(), PREMONOMORPHIC);
   6928     Handle<TypeFeedbackVector> vector =
   6929         handle(current_feedback_vector(), isolate());
   6930     instr->SetVectorAndSlot(vector, slot);
   6931     USE(instr);
   6932     DCHECK(instr->HasObservableSideEffects());
   6933     Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
   6934   }
   6935 }
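        // Summary of the store paths above: a variable found in the script
        // context table is written with HStoreNamedField into its context
        // slot (after compile-time bailouts for const and uninitialized
        // slots); a global backed by a PropertyCell is written through the
        // cell, with deoptimization guards derived from the cell type
        // (constant value, or constant Smi/map representation); anything else
        // goes through HStoreNamedGeneric with the type feedback vector
        // attached.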
   6936 
   6937 
   6938 void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
   6939   Expression* target = expr->target();
   6940   VariableProxy* proxy = target->AsVariableProxy();
   6941   Property* prop = target->AsProperty();
   6942   DCHECK(proxy == NULL || prop == NULL);
   6943 
   6944   // We have a second position recorded in the FullCodeGenerator to have
   6945   // type feedback for the binary operation.
   6946   BinaryOperation* operation = expr->binary_operation();
   6947 
   6948   if (proxy != NULL) {
   6949     Variable* var = proxy->var();
   6950     if (var->mode() == LET)  {
   6951       return Bailout(kUnsupportedLetCompoundAssignment);
   6952     }
   6953 
   6954     CHECK_ALIVE(VisitForValue(operation));
   6955 
   6956     switch (var->location()) {
   6957       case VariableLocation::GLOBAL:
   6958       case VariableLocation::UNALLOCATED:
   6959         HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
   6960                                        expr->AssignmentId());
   6961         break;
   6962 
   6963       case VariableLocation::PARAMETER:
   6964       case VariableLocation::LOCAL:
   6965         if (var->mode() == CONST_LEGACY)  {
   6966           return Bailout(kUnsupportedConstCompoundAssignment);
   6967         }
   6968         if (var->mode() == CONST) {
   6969           return Bailout(kNonInitializerAssignmentToConst);
   6970         }
   6971         BindIfLive(var, Top());
   6972         break;
   6973 
   6974       case VariableLocation::CONTEXT: {
   6975         // Bail out if we try to mutate a parameter value in a function
   6976         // using the arguments object.  We do not (yet) correctly handle the
   6977         // arguments property of the function.
   6978         if (current_info()->scope()->arguments() != NULL) {
   6979           // Parameters will be allocated to context slots.  We have no
   6980           // direct way to detect that the variable is a parameter so we do
   6981           // a linear search of the parameter variables.
   6982           int count = current_info()->scope()->num_parameters();
   6983           for (int i = 0; i < count; ++i) {
   6984             if (var == current_info()->scope()->parameter(i)) {
   6985               Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
   6986             }
   6987           }
   6988         }
   6989 
   6990         HStoreContextSlot::Mode mode;
   6991 
   6992         switch (var->mode()) {
   6993           case LET:
   6994             mode = HStoreContextSlot::kCheckDeoptimize;
   6995             break;
   6996           case CONST:
   6997             return Bailout(kNonInitializerAssignmentToConst);
   6998           case CONST_LEGACY:
   6999             return ast_context()->ReturnValue(Pop());
   7000           default:
   7001             mode = HStoreContextSlot::kNoCheck;
   7002         }
   7003 
   7004         HValue* context = BuildContextChainWalk(var);
   7005         HStoreContextSlot* instr = Add<HStoreContextSlot>(
   7006             context, var->index(), mode, Top());
   7007         if (instr->HasObservableSideEffects()) {
   7008           Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
   7009         }
   7010         break;
   7011       }
   7012 
   7013       case VariableLocation::LOOKUP:
   7014         return Bailout(kCompoundAssignmentToLookupSlot);
   7015     }
   7016     return ast_context()->ReturnValue(Pop());
   7017 
   7018   } else if (prop != NULL) {
   7019     CHECK_ALIVE(VisitForValue(prop->obj()));
   7020     HValue* object = Top();
   7021     HValue* key = NULL;
   7022     if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
   7023       CHECK_ALIVE(VisitForValue(prop->key()));
   7024       key = Top();
   7025     }
   7026 
   7027     CHECK_ALIVE(PushLoad(prop, object, key));
   7028 
   7029     CHECK_ALIVE(VisitForValue(expr->value()));
   7030     HValue* right = Pop();
   7031     HValue* left = Pop();
   7032 
   7033     Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));
   7034 
   7035     BuildStore(expr, prop, expr->AssignmentSlot(), expr->id(),
   7036                expr->AssignmentId(), expr->IsUninitialized());
   7037   } else {
   7038     return Bailout(kInvalidLhsInCompoundAssignment);
   7039   }
   7040 }
   7041 
   7042 
   7043 void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
   7044   DCHECK(!HasStackOverflow());
   7045   DCHECK(current_block() != NULL);
   7046   DCHECK(current_block()->HasPredecessor());
   7047 
   7048   VariableProxy* proxy = expr->target()->AsVariableProxy();
   7049   Property* prop = expr->target()->AsProperty();
   7050   DCHECK(proxy == NULL || prop == NULL);
   7051 
   7052   if (expr->is_compound()) {
   7053     HandleCompoundAssignment(expr);
   7054     return;
   7055   }
   7056 
   7057   if (prop != NULL) {
   7058     HandlePropertyAssignment(expr);
   7059   } else if (proxy != NULL) {
   7060     Variable* var = proxy->var();
   7061 
   7062     if (var->mode() == CONST) {
   7063       if (expr->op() != Token::INIT) {
   7064         return Bailout(kNonInitializerAssignmentToConst);
   7065       }
   7066     } else if (var->mode() == CONST_LEGACY) {
   7067       if (expr->op() != Token::INIT) {
   7068         CHECK_ALIVE(VisitForValue(expr->value()));
   7069         return ast_context()->ReturnValue(Pop());
   7070       }
   7071 
   7072       if (var->IsStackAllocated()) {
   7073         // We insert a use of the old value to detect unsupported uses of const
   7074         // variables (e.g. initialization inside a loop).
   7075         HValue* old_value = environment()->Lookup(var);
   7076         Add<HUseConst>(old_value);
   7077       }
   7078     }
   7079 
   7080     if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);
   7081 
   7082     // Handle the assignment.
   7083     switch (var->location()) {
   7084       case VariableLocation::GLOBAL:
   7085       case VariableLocation::UNALLOCATED:
   7086         CHECK_ALIVE(VisitForValue(expr->value()));
   7087         HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
   7088                                        expr->AssignmentId());
   7089         return ast_context()->ReturnValue(Pop());
   7090 
   7091       case VariableLocation::PARAMETER:
   7092       case VariableLocation::LOCAL: {
   7093         // Perform an initialization check for let declared variables
   7094         // or parameters.
   7095         if (var->mode() == LET && expr->op() == Token::ASSIGN) {
   7096           HValue* env_value = environment()->Lookup(var);
   7097           if (env_value == graph()->GetConstantHole()) {
   7098             return Bailout(kAssignmentToLetVariableBeforeInitialization);
   7099           }
   7100         }
   7101         // We do not allow the arguments object to occur in a context where it
   7102         // may escape, but assignments to stack-allocated locals are
   7103         // permitted.
   7104         CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
   7105         HValue* value = Pop();
   7106         BindIfLive(var, value);
   7107         return ast_context()->ReturnValue(value);
   7108       }
   7109 
   7110       case VariableLocation::CONTEXT: {
   7111         // Bail out if we try to mutate a parameter value in a function using
   7112         // the arguments object.  We do not (yet) correctly handle the
   7113         // arguments property of the function.
   7114         if (current_info()->scope()->arguments() != NULL) {
   7115           // Parameters will be rewritten to context slots.  We have no direct way
   7116           // to detect that the variable is a parameter.
   7117           int count = current_info()->scope()->num_parameters();
   7118           for (int i = 0; i < count; ++i) {
   7119             if (var == current_info()->scope()->parameter(i)) {
   7120               return Bailout(kAssignmentToParameterInArgumentsObject);
   7121             }
   7122           }
   7123         }
   7124 
   7125         CHECK_ALIVE(VisitForValue(expr->value()));
   7126         HStoreContextSlot::Mode mode;
   7127         if (expr->op() == Token::ASSIGN) {
   7128           switch (var->mode()) {
   7129             case LET:
   7130               mode = HStoreContextSlot::kCheckDeoptimize;
   7131               break;
   7132             case CONST:
   7133               // This case is checked statically, so there is no need to
   7134               // perform a check here.
   7135               UNREACHABLE();
   7136             case CONST_LEGACY:
   7137               return ast_context()->ReturnValue(Pop());
   7138             default:
   7139               mode = HStoreContextSlot::kNoCheck;
   7140           }
   7141         } else {
   7142           DCHECK_EQ(Token::INIT, expr->op());
   7143           if (var->mode() == CONST_LEGACY) {
   7144             mode = HStoreContextSlot::kCheckIgnoreAssignment;
   7145           } else {
   7146             mode = HStoreContextSlot::kNoCheck;
   7147           }
   7148         }
   7149 
   7150         HValue* context = BuildContextChainWalk(var);
   7151         HStoreContextSlot* instr = Add<HStoreContextSlot>(
   7152             context, var->index(), mode, Top());
   7153         if (instr->HasObservableSideEffects()) {
   7154           Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
   7155         }
   7156         return ast_context()->ReturnValue(Pop());
   7157       }
   7158 
   7159       case VariableLocation::LOOKUP:
   7160         return Bailout(kAssignmentToLOOKUPVariable);
   7161     }
   7162   } else {
   7163     return Bailout(kInvalidLeftHandSideInAssignment);
   7164   }
   7165 }
   7166 
   7167 
   7168 void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
   7169   // Generators are not optimized, so we should never get here.
   7170   UNREACHABLE();
   7171 }
   7172 
   7173 
   7174 void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
   7175   DCHECK(!HasStackOverflow());
   7176   DCHECK(current_block() != NULL);
   7177   DCHECK(current_block()->HasPredecessor());
   7178   if (!ast_context()->IsEffect()) {
   7179     // The parser turns invalid left-hand sides in assignments into throw
   7180     // statements, which may not be in effect contexts. We might still try
   7181     // to optimize such functions; bail out now if we do.
   7182     return Bailout(kInvalidLeftHandSideInAssignment);
   7183   }
   7184   CHECK_ALIVE(VisitForValue(expr->exception()));
   7185 
   7186   HValue* value = environment()->Pop();
   7187   if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
   7188   Add<HPushArguments>(value);
   7189   Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kThrow), 1);
   7190   Add<HSimulate>(expr->id());
   7191 
   7192   // If the throw definitely exits the function, we can finish with a dummy
   7193   // control flow at this point.  This is not the case if the throw is inside
   7194   // an inlined function which may be replaced.
   7195   if (call_context() == NULL) {
   7196     FinishExitCurrentBlock(New<HAbnormalExit>());
   7197   }
   7198 }
   7199 
   7200 
   7201 HInstruction* HGraphBuilder::AddLoadStringInstanceType(HValue* string) {
   7202   if (string->IsConstant()) {
   7203     HConstant* c_string = HConstant::cast(string);
   7204     if (c_string->HasStringValue()) {
   7205       return Add<HConstant>(c_string->StringValue()->map()->instance_type());
   7206     }
   7207   }
   7208   return Add<HLoadNamedField>(
   7209       Add<HLoadNamedField>(string, nullptr, HObjectAccess::ForMap()), nullptr,
   7210       HObjectAccess::ForMapInstanceType());
   7211 }
   7212 
   7213 
   7214 HInstruction* HGraphBuilder::AddLoadStringLength(HValue* string) {
   7215   return AddInstruction(BuildLoadStringLength(string));
   7216 }
   7217 
   7218 
   7219 HInstruction* HGraphBuilder::BuildLoadStringLength(HValue* string) {
   7220   if (string->IsConstant()) {
   7221     HConstant* c_string = HConstant::cast(string);
   7222     if (c_string->HasStringValue()) {
   7223       return New<HConstant>(c_string->StringValue()->length());
   7224     }
   7225   }
   7226   return New<HLoadNamedField>(string, nullptr,
   7227                               HObjectAccess::ForStringLength());
   7228 }
   7229 
   7230 
   7231 HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
   7232     PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
   7233     HValue* object, Handle<Name> name, HValue* value, bool is_uninitialized) {
   7234   if (is_uninitialized) {
   7235     Add<HDeoptimize>(
   7236         Deoptimizer::kInsufficientTypeFeedbackForGenericNamedAccess,
   7237         Deoptimizer::SOFT);
   7238   }
   7239   if (access_type == LOAD) {
   7240     Handle<TypeFeedbackVector> vector =
   7241         handle(current_feedback_vector(), isolate());
   7242 
   7243     if (!expr->AsProperty()->key()->IsPropertyName()) {
   7244       // It's possible that a keyed load of a constant string was converted
   7245       // to a named load. Here, at the last minute, we need to make sure to
   7246       // use a generic Keyed Load if we are using the type vector, because
   7247       // it has to share information with full code.
   7248       HConstant* key = Add<HConstant>(name);
   7249       HLoadKeyedGeneric* result = New<HLoadKeyedGeneric>(
   7250           object, key, function_language_mode(), PREMONOMORPHIC);
   7251       result->SetVectorAndSlot(vector, slot);
   7252       return result;
   7253     }
   7254 
   7255     HLoadNamedGeneric* result = New<HLoadNamedGeneric>(
   7256         object, name, function_language_mode(), PREMONOMORPHIC);
   7257     result->SetVectorAndSlot(vector, slot);
   7258     return result;
   7259   } else {
   7260     if (current_feedback_vector()->GetKind(slot) ==
   7261         FeedbackVectorSlotKind::KEYED_STORE_IC) {
   7262       // It's possible that a keyed store of a constant string was converted
   7263       // to a named store. Here, at the last minute, we need to make sure to
   7264       // use a generic Keyed Store if we are using the type vector, because
   7265       // it has to share information with full code.
   7266       HConstant* key = Add<HConstant>(name);
   7267       HStoreKeyedGeneric* result = New<HStoreKeyedGeneric>(
   7268           object, key, value, function_language_mode(), PREMONOMORPHIC);
   7269       Handle<TypeFeedbackVector> vector =
   7270           handle(current_feedback_vector(), isolate());
   7271       result->SetVectorAndSlot(vector, slot);
   7272       return result;
   7273     }
   7274 
   7275     HStoreNamedGeneric* result = New<HStoreNamedGeneric>(
   7276         object, name, value, function_language_mode(), PREMONOMORPHIC);
   7277     Handle<TypeFeedbackVector> vector =
   7278         handle(current_feedback_vector(), isolate());
   7279     result->SetVectorAndSlot(vector, slot);
   7280     return result;
   7281   }
   7282 }
   7283 
   7284 
   7285 HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
   7286     PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
   7287     HValue* object, HValue* key, HValue* value) {
   7288   if (access_type == LOAD) {
   7289     InlineCacheState initial_state = expr->AsProperty()->GetInlineCacheState();
   7290     HLoadKeyedGeneric* result = New<HLoadKeyedGeneric>(
   7291         object, key, function_language_mode(), initial_state);
   7292     // HLoadKeyedGeneric with vector ICs benefits from being encoded as
   7293     // MEGAMORPHIC because the vector/slot combo becomes unnecessary.
   7294     if (initial_state != MEGAMORPHIC) {
   7295       // We need to pass vector information.
   7296       Handle<TypeFeedbackVector> vector =
   7297           handle(current_feedback_vector(), isolate());
   7298       result->SetVectorAndSlot(vector, slot);
   7299     }
   7300     return result;
   7301   } else {
   7302     HStoreKeyedGeneric* result = New<HStoreKeyedGeneric>(
   7303         object, key, value, function_language_mode(), PREMONOMORPHIC);
   7304     Handle<TypeFeedbackVector> vector =
   7305         handle(current_feedback_vector(), isolate());
   7306     result->SetVectorAndSlot(vector, slot);
   7307     return result;
   7308   }
   7309 }
   7310 
   7311 
   7312 LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
   7313   // Loads from a "stock" fast holey double arrays can elide the hole check.
   7314   // Loads from a "stock" fast holey array can convert the hole to undefined
   7315   // with impunity.
   7316   LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
   7317   bool holey_double_elements =
   7318       *map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS);
   7319   bool holey_elements =
   7320       *map == isolate()->get_initial_js_array_map(FAST_HOLEY_ELEMENTS);
   7321   if ((holey_double_elements || holey_elements) &&
   7322       isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
   7323     load_mode =
   7324         holey_double_elements ? ALLOW_RETURN_HOLE : CONVERT_HOLE_TO_UNDEFINED;
   7325 
   7326     Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
   7327     Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
   7328     BuildCheckPrototypeMaps(prototype, object_prototype);
   7329     graph()->MarkDependsOnEmptyArrayProtoElements();
   7330   }
   7331   return load_mode;
   7332 }
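        // The "stock" maps above are the initial JSArray maps for the holey
        // fast kinds. While the array-constructor prototype chain is intact,
        // a hole read from such an array cannot be shadowed by an element on
        // the prototypes, so the load may return the hole directly (doubles)
        // or convert it straight to undefined; the prototype maps are checked
        // and the graph records the dependency via
        // MarkDependsOnEmptyArrayProtoElements().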
   7333 
   7334 
   7335 HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
   7336     HValue* object,
   7337     HValue* key,
   7338     HValue* val,
   7339     HValue* dependency,
   7340     Handle<Map> map,
   7341     PropertyAccessType access_type,
   7342     KeyedAccessStoreMode store_mode) {
   7343   HCheckMaps* checked_object = Add<HCheckMaps>(object, map, dependency);
   7344 
   7345   if (access_type == STORE && map->prototype()->IsJSObject()) {
   7346     // Monomorphic stores need a prototype chain check because shape
   7347     // changes could allow callbacks on elements in the chain that
   7348     // aren't compatible with monomorphic keyed stores.
   7349     PrototypeIterator iter(map);
   7350     JSObject* holder = NULL;
   7351     while (!iter.IsAtEnd()) {
   7352       // JSProxies can't occur here because we wouldn't have installed a
   7353       // non-generic IC if there were any.
   7354       holder = *PrototypeIterator::GetCurrent<JSObject>(iter);
   7355       iter.Advance();
   7356     }
   7357     DCHECK(holder && holder->IsJSObject());
   7358 
   7359     BuildCheckPrototypeMaps(handle(JSObject::cast(map->prototype())),
   7360                             Handle<JSObject>(holder));
   7361   }
   7362 
   7363   LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
   7364   return BuildUncheckedMonomorphicElementAccess(
   7365       checked_object, key, val,
   7366       map->instance_type() == JS_ARRAY_TYPE,
   7367       map->elements_kind(), access_type,
   7368       load_mode, store_mode);
   7369 }
   7370 
   7371 
   7372 static bool CanInlineElementAccess(Handle<Map> map) {
   7373   return map->IsJSObjectMap() && !map->has_dictionary_elements() &&
   7374          !map->has_sloppy_arguments_elements() &&
   7375          !map->has_indexed_interceptor() && !map->is_access_check_needed();
   7376 }
   7377 
   7378 
   7379 HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
   7380     HValue* object,
   7381     HValue* key,
   7382     HValue* val,
   7383     SmallMapList* maps) {
   7384   // For polymorphic loads of similar elements kinds (i.e. all tagged or all
   7385   // double), always use the "worst case" code without a transition.  This is
   7386   // much faster than transitioning the elements to the worst case, trading a
   7387   // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
   7388   bool has_double_maps = false;
   7389   bool has_smi_or_object_maps = false;
   7390   bool has_js_array_access = false;
   7391   bool has_non_js_array_access = false;
   7392   bool has_seen_holey_elements = false;
   7393   Handle<Map> most_general_consolidated_map;
   7394   for (int i = 0; i < maps->length(); ++i) {
   7395     Handle<Map> map = maps->at(i);
   7396     if (!CanInlineElementAccess(map)) return NULL;
   7397     // Don't allow mixing of JSArrays with JSObjects.
   7398     if (map->instance_type() == JS_ARRAY_TYPE) {
   7399       if (has_non_js_array_access) return NULL;
   7400       has_js_array_access = true;
   7401     } else if (has_js_array_access) {
   7402       return NULL;
   7403     } else {
   7404       has_non_js_array_access = true;
   7405     }
   7406     // Don't allow mixed, incompatible elements kinds.
   7407     if (map->has_fast_double_elements()) {
   7408       if (has_smi_or_object_maps) return NULL;
   7409       has_double_maps = true;
   7410     } else if (map->has_fast_smi_or_object_elements()) {
   7411       if (has_double_maps) return NULL;
   7412       has_smi_or_object_maps = true;
   7413     } else {
   7414       return NULL;
   7415     }
   7416     // Remember if we've ever seen holey elements.
   7417     if (IsHoleyElementsKind(map->elements_kind())) {
   7418       has_seen_holey_elements = true;
   7419     }
   7420     // Remember the most general elements kind, the code for its load will
   7421     // properly handle all of the more specific cases.
   7422     if ((i == 0) || IsMoreGeneralElementsKindTransition(
   7423             most_general_consolidated_map->elements_kind(),
   7424             map->elements_kind())) {
   7425       most_general_consolidated_map = map;
   7426     }
   7427   }
   7428   if (!has_double_maps && !has_smi_or_object_maps) return NULL;
   7429 
   7430   HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
   7431   // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
   7432   // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
   7433   ElementsKind consolidated_elements_kind = has_seen_holey_elements
   7434       ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
   7435       : most_general_consolidated_map->elements_kind();
   7436   LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
   7437   if (has_seen_holey_elements) {
   7438     // Make sure that all of the maps we are handling have the initial array
   7439     // prototype.
   7440     bool saw_non_array_prototype = false;
   7441     for (int i = 0; i < maps->length(); ++i) {
   7442       Handle<Map> map = maps->at(i);
   7443       if (map->prototype() != *isolate()->initial_array_prototype()) {
   7444         // We can't guarantee that loading the hole is safe. The prototype may
   7445         // have an element at this position.
   7446         saw_non_array_prototype = true;
   7447         break;
   7448       }
   7449     }
   7450 
   7451     if (!saw_non_array_prototype) {
   7452       Handle<Map> holey_map = handle(
   7453           isolate()->get_initial_js_array_map(consolidated_elements_kind));
   7454       load_mode = BuildKeyedHoleMode(holey_map);
   7455       if (load_mode != NEVER_RETURN_HOLE) {
   7456         for (int i = 0; i < maps->length(); ++i) {
   7457           Handle<Map> map = maps->at(i);
   7458           // The prototype check was already done for the holey map in
   7459           // BuildKeyedHoleMode.
   7460           if (!map.is_identical_to(holey_map)) {
   7461             Handle<JSObject> prototype(JSObject::cast(map->prototype()),
   7462                                        isolate());
   7463             Handle<JSObject> object_prototype =
   7464                 isolate()->initial_object_prototype();
   7465             BuildCheckPrototypeMaps(prototype, object_prototype);
   7466           }
   7467         }
   7468       }
   7469     }
   7470   }
   7471   HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
   7472       checked_object, key, val,
   7473       most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
   7474       consolidated_elements_kind, LOAD, load_mode, STANDARD_STORE);
   7475   return instr;
   7476 }
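        // Consolidation above trades per-map dispatch for one HCheckMaps over
        // the whole map list plus a single element load in the most general
        // elements kind seen; if any input is holey, that kind is upgraded to
        // its holey variant and, when the prototype chains allow it, the
        // hole-elision mode from BuildKeyedHoleMode() is reused.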
   7477 
   7478 
   7479 HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
   7480     Expression* expr, FeedbackVectorSlot slot, HValue* object, HValue* key,
   7481     HValue* val, SmallMapList* maps, PropertyAccessType access_type,
   7482     KeyedAccessStoreMode store_mode, bool* has_side_effects) {
   7483   *has_side_effects = false;
   7484   BuildCheckHeapObject(object);
   7485 
   7486   if (access_type == LOAD) {
   7487     HInstruction* consolidated_load =
   7488         TryBuildConsolidatedElementLoad(object, key, val, maps);
   7489     if (consolidated_load != NULL) {
   7490       *has_side_effects |= consolidated_load->HasObservableSideEffects();
   7491       return consolidated_load;
   7492     }
   7493   }
   7494 
   7495   // Elements kind transition support.
   7496   MapHandleList transition_target(maps->length());
   7497   // Collect possible transition targets.
   7498   MapHandleList possible_transitioned_maps(maps->length());
   7499   for (int i = 0; i < maps->length(); ++i) {
   7500     Handle<Map> map = maps->at(i);
   7501     // Loads from strings or loads with a mix of string and non-string maps
   7502     // shouldn't be handled polymorphically.
   7503     DCHECK(access_type != LOAD || !map->IsStringMap());
   7504     ElementsKind elements_kind = map->elements_kind();
   7505     if (CanInlineElementAccess(map) && IsFastElementsKind(elements_kind) &&
   7506         elements_kind != GetInitialFastElementsKind()) {
   7507       possible_transitioned_maps.Add(map);
   7508     }
   7509     if (IsSloppyArgumentsElements(elements_kind)) {
   7510       HInstruction* result =
   7511           BuildKeyedGeneric(access_type, expr, slot, object, key, val);
   7512       *has_side_effects = result->HasObservableSideEffects();
   7513       return AddInstruction(result);
   7514     }
   7515   }
   7516   // Get transition target for each map (NULL == no transition).
   7517   for (int i = 0; i < maps->length(); ++i) {
   7518     Handle<Map> map = maps->at(i);
   7519     Handle<Map> transitioned_map =
   7520         Map::FindTransitionedMap(map, &possible_transitioned_maps);
   7521     transition_target.Add(transitioned_map);
   7522   }
   7523 
   7524   MapHandleList untransitionable_maps(maps->length());
   7525   HTransitionElementsKind* transition = NULL;
   7526   for (int i = 0; i < maps->length(); ++i) {
   7527     Handle<Map> map = maps->at(i);
   7528     DCHECK(map->IsMap());
   7529     if (!transition_target.at(i).is_null()) {
   7530       DCHECK(Map::IsValidElementsTransition(
   7531           map->elements_kind(),
   7532           transition_target.at(i)->elements_kind()));
   7533       transition = Add<HTransitionElementsKind>(object, map,
   7534                                                 transition_target.at(i));
   7535     } else {
   7536       untransitionable_maps.Add(map);
   7537     }
   7538   }
   7539 
   7540   // If only one map is left after transitioning, handle this case
   7541   // monomorphically.
   7542   DCHECK(untransitionable_maps.length() >= 1);
   7543   if (untransitionable_maps.length() == 1) {
   7544     Handle<Map> untransitionable_map = untransitionable_maps[0];
   7545     HInstruction* instr = NULL;
   7546     if (!CanInlineElementAccess(untransitionable_map)) {
   7547       instr = AddInstruction(
   7548           BuildKeyedGeneric(access_type, expr, slot, object, key, val));
   7549     } else {
   7550       instr = BuildMonomorphicElementAccess(
   7551           object, key, val, transition, untransitionable_map, access_type,
   7552           store_mode);
   7553     }
   7554     *has_side_effects |= instr->HasObservableSideEffects();
   7555     return access_type == STORE ? val : instr;
   7556   }
   7557 
   7558   HBasicBlock* join = graph()->CreateBasicBlock();
   7559 
   7560   for (int i = 0; i < untransitionable_maps.length(); ++i) {
   7561     Handle<Map> map = untransitionable_maps[i];
   7562     ElementsKind elements_kind = map->elements_kind();
   7563     HBasicBlock* this_map = graph()->CreateBasicBlock();
   7564     HBasicBlock* other_map = graph()->CreateBasicBlock();
   7565     HCompareMap* mapcompare =
   7566         New<HCompareMap>(object, map, this_map, other_map);
   7567     FinishCurrentBlock(mapcompare);
   7568 
   7569     set_current_block(this_map);
   7570     HInstruction* access = NULL;
   7571     if (!CanInlineElementAccess(map)) {
   7572       access = AddInstruction(
   7573           BuildKeyedGeneric(access_type, expr, slot, object, key, val));
   7574     } else {
   7575       DCHECK(IsFastElementsKind(elements_kind) ||
   7576              IsFixedTypedArrayElementsKind(elements_kind));
   7577       LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
   7578       // Happily, mapcompare is a checked object.
   7579       access = BuildUncheckedMonomorphicElementAccess(
   7580           mapcompare, key, val,
   7581           map->instance_type() == JS_ARRAY_TYPE,
   7582           elements_kind, access_type,
   7583           load_mode,
   7584           store_mode);
   7585     }
   7586     *has_side_effects |= access->HasObservableSideEffects();
   7587     // The caller will use has_side_effects and add a correct Simulate.
   7588     access->SetFlag(HValue::kHasNoObservableSideEffects);
   7589     if (access_type == LOAD) {
   7590       Push(access);
   7591     }
   7592     NoObservableSideEffectsScope scope(this);
   7593     GotoNoSimulate(join);
   7594     set_current_block(other_map);
   7595   }
   7596 
   7597   // Ensure that we visited at least one map above that goes to join. This is
   7598   // necessary because FinishExitWithHardDeoptimization does an AbnormalExit
   7599   // rather than joining the join block. If this becomes an issue, insert a
   7600   // generic access in the case length() == 0.
   7601   DCHECK(join->predecessors()->length() > 0);
   7602   // Deopt if none of the cases matched.
   7603   NoObservableSideEffectsScope scope(this);
   7604   FinishExitWithHardDeoptimization(
   7605       Deoptimizer::kUnknownMapInPolymorphicElementAccess);
   7606   set_current_block(join);
   7607   return access_type == STORE ? val : Pop();
   7608 }
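        // Structure of the polymorphic element access above: maps with an
        // elements-kind transition target first get an
        // HTransitionElementsKind; if a single map remains it is handled
        // monomorphically, otherwise each remaining map gets an HCompareMap
        // branch into its own (inlined or generic) access, all meeting at
        // join, and any unexpected map ends in a hard deoptimization.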
   7609 
   7610 
   7611 HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
   7612     HValue* obj, HValue* key, HValue* val, Expression* expr,
   7613     FeedbackVectorSlot slot, BailoutId ast_id, BailoutId return_id,
   7614     PropertyAccessType access_type, bool* has_side_effects) {
   7615   // The type feedback for a keyed name access may record the name used.
   7616   Handle<TypeFeedbackVector> vector =
   7617       handle(current_feedback_vector(), isolate());
   7618   HValue* expected_key = key;
   7619   if (!key->ActualValue()->IsConstant()) {
   7620     Name* name = nullptr;
   7621     if (access_type == LOAD) {
   7622       KeyedLoadICNexus nexus(vector, slot);
   7623       name = nexus.FindFirstName();
   7624     } else {
   7625       KeyedStoreICNexus nexus(vector, slot);
   7626       name = nexus.FindFirstName();
   7627     }
   7628     if (name != nullptr) {
   7629       Handle<Name> handle_name(name);
   7630       expected_key = Add<HConstant>(handle_name);
   7631       // We need a check against the key.
   7632       bool in_new_space = isolate()->heap()->InNewSpace(*handle_name);
   7633       Unique<Name> unique_name = Unique<Name>::CreateUninitialized(handle_name);
   7634       Add<HCheckValue>(key, unique_name, in_new_space);
   7635     }
   7636   }
   7637   if (expected_key->ActualValue()->IsConstant()) {
   7638     Handle<Object> constant =
   7639         HConstant::cast(expected_key->ActualValue())->handle(isolate());
   7640     uint32_t array_index;
   7641     if ((constant->IsString() &&
   7642          !Handle<String>::cast(constant)->AsArrayIndex(&array_index)) ||
   7643         constant->IsSymbol()) {
   7644       if (!constant->IsUniqueName()) {
   7645         constant = isolate()->factory()->InternalizeString(
   7646             Handle<String>::cast(constant));
   7647       }
   7648       HValue* access =
   7649           BuildNamedAccess(access_type, ast_id, return_id, expr, slot, obj,
   7650                            Handle<Name>::cast(constant), val, false);
   7651       if (access == NULL || access->IsPhi() ||
   7652           HInstruction::cast(access)->IsLinked()) {
   7653         *has_side_effects = false;
   7654       } else {
   7655         HInstruction* instr = HInstruction::cast(access);
   7656         AddInstruction(instr);
   7657         *has_side_effects = instr->HasObservableSideEffects();
   7658       }
   7659       return access;
   7660     }
   7661   }
   7662 
   7663   DCHECK(!expr->IsPropertyName());
   7664   HInstruction* instr = NULL;
   7665 
   7666   SmallMapList* maps;
   7667   bool monomorphic = ComputeReceiverTypes(expr, obj, &maps, zone());
   7668 
   7669   bool force_generic = false;
   7670   if (expr->GetKeyType() == PROPERTY) {
   7671     // Non-generic accesses assume that elements are being accessed, and will
   7672     // deopt for non-index keys, which the IC knows will occur.
   7673     // TODO(jkummerow): Consider adding proper support for property accesses.
   7674     force_generic = true;
   7675     monomorphic = false;
   7676   } else if (access_type == STORE &&
   7677              (monomorphic || (maps != NULL && !maps->is_empty()))) {
   7678     // Stores can't be mono/polymorphic if their prototype chain has dictionary
   7679     // elements. However, a receiver map that has dictionary elements itself
   7680     // should be left to normal mono/poly behavior (the other maps may benefit
   7681     // from highly optimized stores).
   7682     for (int i = 0; i < maps->length(); i++) {
   7683       Handle<Map> current_map = maps->at(i);
   7684       if (current_map->DictionaryElementsInPrototypeChainOnly()) {
   7685         force_generic = true;
   7686         monomorphic = false;
   7687         break;
   7688       }
   7689     }
   7690   } else if (access_type == LOAD && !monomorphic &&
   7691              (maps != NULL && !maps->is_empty())) {
   7692     // Polymorphic loads have to go generic if any of the maps are strings.
   7693     // If some, but not all of the maps are strings, we should go generic
   7694     // because polymorphic access wants to key on ElementsKind and isn't
   7695     // compatible with strings.
   7696     for (int i = 0; i < maps->length(); i++) {
   7697       Handle<Map> current_map = maps->at(i);
   7698       if (current_map->IsStringMap()) {
   7699         force_generic = true;
   7700         break;
   7701       }
   7702     }
   7703   }
   7704 
   7705   if (monomorphic) {
   7706     Handle<Map> map = maps->first();
   7707     if (!CanInlineElementAccess(map)) {
   7708       instr = AddInstruction(
   7709           BuildKeyedGeneric(access_type, expr, slot, obj, key, val));
   7710     } else {
   7711       BuildCheckHeapObject(obj);
   7712       instr = BuildMonomorphicElementAccess(
   7713           obj, key, val, NULL, map, access_type, expr->GetStoreMode());
   7714     }
   7715   } else if (!force_generic && (maps != NULL && !maps->is_empty())) {
   7716     return HandlePolymorphicElementAccess(expr, slot, obj, key, val, maps,
   7717                                           access_type, expr->GetStoreMode(),
   7718                                           has_side_effects);
   7719   } else {
   7720     if (access_type == STORE) {
   7721       if (expr->IsAssignment() &&
   7722           expr->AsAssignment()->HasNoTypeInformation()) {
   7723         Add<HDeoptimize>(Deoptimizer::kInsufficientTypeFeedbackForKeyedStore,
   7724                          Deoptimizer::SOFT);
   7725       }
   7726     } else {
   7727       if (expr->AsProperty()->HasNoTypeInformation()) {
   7728         Add<HDeoptimize>(Deoptimizer::kInsufficientTypeFeedbackForKeyedLoad,
   7729                          Deoptimizer::SOFT);
   7730       }
   7731     }
   7732     instr = AddInstruction(
   7733         BuildKeyedGeneric(access_type, expr, slot, obj, key, val));
   7734   }
   7735   *has_side_effects = instr->HasObservableSideEffects();
   7736   return instr;
   7737 }
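        // Dispatch order above: a constant (or feedback-recovered) name key is
        // routed to BuildNamedAccess(); otherwise a single inlinable map gets
        // a monomorphic element access, several maps go to
        // HandlePolymorphicElementAccess(), and everything else falls back to
        // a generic keyed IC, with a soft deoptimization when no type
        // feedback is available.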
   7738 
   7739 
   7740 void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
   7741   // Outermost function already has arguments on the stack.
   7742   if (function_state()->outer() == NULL) return;
   7743 
   7744   if (function_state()->arguments_pushed()) return;
   7745 
   7746   // Push arguments when entering inlined function.
   7747   HEnterInlined* entry = function_state()->entry();
   7748   entry->set_arguments_pushed();
   7749 
   7750   HArgumentsObject* arguments = entry->arguments_object();
   7751   const ZoneList<HValue*>* arguments_values = arguments->arguments_values();
   7752 
   7753   HInstruction* insert_after = entry;
   7754   for (int i = 0; i < arguments_values->length(); i++) {
   7755     HValue* argument = arguments_values->at(i);
   7756     HInstruction* push_argument = New<HPushArguments>(argument);
   7757     push_argument->InsertAfter(insert_after);
   7758     insert_after = push_argument;
   7759   }
   7760 
   7761   HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
   7762   arguments_elements->ClearFlag(HValue::kUseGVN);
   7763   arguments_elements->InsertAfter(insert_after);
   7764   function_state()->set_arguments_elements(arguments_elements);
   7765 }
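        // This materializes the arguments of an inlined frame: explicit
        // HPushArguments instructions are inserted right after the
        // HEnterInlined so that the HArgumentsElements created here (and the
        // HAccessArgumentsAt built below) can read them even though no real
        // frame exists.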
   7766 
   7767 
   7768 bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
   7769   VariableProxy* proxy = expr->obj()->AsVariableProxy();
   7770   if (proxy == NULL) return false;
   7771   if (!proxy->var()->IsStackAllocated()) return false;
   7772   if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
   7773     return false;
   7774   }
   7775 
   7776   HInstruction* result = NULL;
   7777   if (expr->key()->IsPropertyName()) {
   7778     Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
   7779     if (!String::Equals(name, isolate()->factory()->length_string())) {
   7780       return false;
   7781     }
   7782 
   7783     if (function_state()->outer() == NULL) {
   7784       HInstruction* elements = Add<HArgumentsElements>(false);
   7785       result = New<HArgumentsLength>(elements);
   7786     } else {
   7787       // Number of arguments without receiver.
   7788       int argument_count = environment()->
   7789           arguments_environment()->parameter_count() - 1;
   7790       result = New<HConstant>(argument_count);
   7791     }
   7792   } else {
   7793     Push(graph()->GetArgumentsObject());
   7794     CHECK_ALIVE_OR_RETURN(VisitForValue(expr->key()), true);
   7795     HValue* key = Pop();
   7796     Drop(1);  // Arguments object.
   7797     if (function_state()->outer() == NULL) {
   7798       HInstruction* elements = Add<HArgumentsElements>(false);
   7799       HInstruction* length = Add<HArgumentsLength>(elements);
   7800       HInstruction* checked_key = Add<HBoundsCheck>(key, length);
   7801       result = New<HAccessArgumentsAt>(elements, length, checked_key);
   7802     } else {
   7803       EnsureArgumentsArePushedForAccess();
   7804 
   7805       // Number of arguments without receiver.
   7806       HInstruction* elements = function_state()->arguments_elements();
   7807       int argument_count = environment()->
   7808           arguments_environment()->parameter_count() - 1;
   7809       HInstruction* length = Add<HConstant>(argument_count);
   7810       HInstruction* checked_key = Add<HBoundsCheck>(key, length);
   7811       result = New<HAccessArgumentsAt>(elements, length, checked_key);
   7812     }
   7813   }
   7814   ast_context()->ReturnInstruction(result, expr->id());
   7815   return true;
   7816 }
   7817 
   7818 
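         // Build a named load or store based on the receiver maps gathered from type
         // feedback: a monomorphic access guarded by map (or string instance type)
         // checks when all maps agree on one handler, a polymorphic dispatch
         // otherwise, and a generic access when no useful maps are known. Returns
         // NULL when the polymorphic path has already delivered the result itself.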
   7819 HValue* HOptimizedGraphBuilder::BuildNamedAccess(
   7820     PropertyAccessType access, BailoutId ast_id, BailoutId return_id,
   7821     Expression* expr, FeedbackVectorSlot slot, HValue* object,
   7822     Handle<Name> name, HValue* value, bool is_uninitialized) {
   7823   SmallMapList* maps;
   7824   ComputeReceiverTypes(expr, object, &maps, zone());
   7825   DCHECK(maps != NULL);
   7826 
   7827   if (maps->length() > 0) {
   7828     PropertyAccessInfo info(this, access, maps->first(), name);
   7829     if (!info.CanAccessAsMonomorphic(maps)) {
   7830       HandlePolymorphicNamedFieldAccess(access, expr, slot, ast_id, return_id,
   7831                                         object, value, maps, name);
   7832       return NULL;
   7833     }
   7834 
   7835     HValue* checked_object;
   7836     // Type::Number() is only supported by polymorphic load/call handling.
   7837     DCHECK(!info.IsNumberType());
   7838     BuildCheckHeapObject(object);
   7839     if (AreStringTypes(maps)) {
   7840       checked_object =
   7841           Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING);
   7842     } else {
   7843       checked_object = Add<HCheckMaps>(object, maps);
   7844     }
   7845     return BuildMonomorphicAccess(
   7846         &info, object, checked_object, value, ast_id, return_id);
   7847   }
   7848 
   7849   return BuildNamedGeneric(access, expr, slot, object, name, value,
   7850                            is_uninitialized);
   7851 }
   7852 
   7853 
   7854 void HOptimizedGraphBuilder::PushLoad(Property* expr,
   7855                                       HValue* object,
   7856                                       HValue* key) {
   7857   ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
   7858   Push(object);
   7859   if (key != NULL) Push(key);
   7860   BuildLoad(expr, expr->LoadId());
   7861 }
   7862 
   7863 
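         // Emit the load for a property expression. Three cases are handled here:
         // indexed string accesses become a char-code load plus HStringCharFromCode,
         // named properties go through BuildNamedAccess, and everything else becomes
         // a keyed element access (with a simulate added when it may have observable
         // side effects).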
   7864 void HOptimizedGraphBuilder::BuildLoad(Property* expr,
   7865                                        BailoutId ast_id) {
   7866   HInstruction* instr = NULL;
   7867   if (expr->IsStringAccess() && expr->GetKeyType() == ELEMENT) {
   7868     HValue* index = Pop();
   7869     HValue* string = Pop();
   7870     HInstruction* char_code = BuildStringCharCodeAt(string, index);
   7871     AddInstruction(char_code);
   7872     instr = NewUncasted<HStringCharFromCode>(char_code);
   7873 
   7874   } else if (expr->key()->IsPropertyName()) {
   7875     Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
   7876     HValue* object = Pop();
   7877 
   7878     HValue* value = BuildNamedAccess(LOAD, ast_id, expr->LoadId(), expr,
   7879                                      expr->PropertyFeedbackSlot(), object, name,
   7880                                      NULL, expr->IsUninitialized());
   7881     if (value == NULL) return;
   7882     if (value->IsPhi()) return ast_context()->ReturnValue(value);
   7883     instr = HInstruction::cast(value);
   7884     if (instr->IsLinked()) return ast_context()->ReturnValue(instr);
   7885 
   7886   } else {
   7887     HValue* key = Pop();
   7888     HValue* obj = Pop();
   7889 
   7890     bool has_side_effects = false;
   7891     HValue* load = HandleKeyedElementAccess(
   7892         obj, key, NULL, expr, expr->PropertyFeedbackSlot(), ast_id,
   7893         expr->LoadId(), LOAD, &has_side_effects);
   7894     if (has_side_effects) {
   7895       if (ast_context()->IsEffect()) {
   7896         Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
   7897       } else {
   7898         Push(load);
   7899         Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
   7900         Drop(1);
   7901       }
   7902     }
   7903     if (load == NULL) return;
   7904     return ast_context()->ReturnValue(load);
   7905   }
   7906   return ast_context()->ReturnInstruction(instr, ast_id);
   7907 }
   7908 
   7909 
   7910 void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
   7911   DCHECK(!HasStackOverflow());
   7912   DCHECK(current_block() != NULL);
   7913   DCHECK(current_block()->HasPredecessor());
   7914 
   7915   if (TryArgumentsAccess(expr)) return;
   7916 
   7917   CHECK_ALIVE(VisitForValue(expr->obj()));
   7918   if (!expr->key()->IsPropertyName() || expr->IsStringAccess()) {
   7919     CHECK_ALIVE(VisitForValue(expr->key()));
   7920   }
   7921 
   7922   BuildLoad(expr, expr->id());
   7923 }
   7924 
   7925 
   7926 HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant) {
   7927   HCheckMaps* check = Add<HCheckMaps>(
   7928       Add<HConstant>(constant), handle(constant->map()));
   7929   check->ClearDependsOnFlag(kElementsKind);
   7930   return check;
   7931 }
   7932 
   7933 
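         // Guard the prototype chain from `prototype` up to and including `holder`
         // by adding a map check for each object on the chain; if `holder` is null
         // the entire chain is checked and NULL is returned. A later map change on
         // any of these objects then invalidates the optimized access.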
   7934 HInstruction* HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
   7935                                                      Handle<JSObject> holder) {
   7936   PrototypeIterator iter(isolate(), prototype,
   7937                          PrototypeIterator::START_AT_RECEIVER);
   7938   while (holder.is_null() ||
   7939          !PrototypeIterator::GetCurrent(iter).is_identical_to(holder)) {
   7940     BuildConstantMapCheck(PrototypeIterator::GetCurrent<JSObject>(iter));
   7941     iter.Advance();
   7942     if (iter.IsAtEnd()) {
   7943       return NULL;
   7944     }
   7945   }
   7946   return BuildConstantMapCheck(PrototypeIterator::GetCurrent<JSObject>(iter));
   7947 }
   7948 
   7949 
   7950 void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
   7951                                                    Handle<Map> receiver_map) {
   7952   if (!holder.is_null()) {
   7953     Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
   7954     BuildCheckPrototypeMaps(prototype, holder);
   7955   }
   7956 }
   7957 
   7958 
   7959 HInstruction* HOptimizedGraphBuilder::NewPlainFunctionCall(HValue* fun,
   7960                                                            int argument_count) {
   7961   return New<HCallJSFunction>(fun, argument_count);
   7962 }
   7963 
   7964 
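         // Build a call through the argument adaptor stub returned by
         // CodeFactory::ArgumentAdaptor, passing the expected parameter count so the
         // adaptor can make the actual arguments match the callee's formals. Used by
         // BuildCallConstantFunction when the call site's arity differs from the
         // target's formal parameter count.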
   7965 HInstruction* HOptimizedGraphBuilder::NewArgumentAdaptorCall(
   7966     HValue* fun, HValue* context,
   7967     int argument_count, HValue* expected_param_count) {
   7968   HValue* new_target = graph()->GetConstantUndefined();
   7969   HValue* arity = Add<HConstant>(argument_count - 1);
   7970 
   7971   HValue* op_vals[] = {context, fun, new_target, arity, expected_param_count};
   7972 
   7973   Callable callable = CodeFactory::ArgumentAdaptor(isolate());
   7974   HConstant* stub = Add<HConstant>(callable.code());
   7975 
   7976   return New<HCallWithDescriptor>(stub, argument_count, callable.descriptor(),
   7977                                   Vector<HValue*>(op_vals, arraysize(op_vals)));
   7978 }
   7979 
   7980 
   7981 HInstruction* HOptimizedGraphBuilder::BuildCallConstantFunction(
   7982     Handle<JSFunction> jsfun, int argument_count) {
   7983   HValue* target = Add<HConstant>(jsfun);
    7984   // For constant functions, we try to avoid calling the
    7985   // argument adaptor and instead call the function directly.
   7986   int formal_parameter_count =
   7987       jsfun->shared()->internal_formal_parameter_count();
   7988   bool dont_adapt_arguments =
   7989       (formal_parameter_count ==
   7990        SharedFunctionInfo::kDontAdaptArgumentsSentinel);
   7991   int arity = argument_count - 1;
   7992   bool can_invoke_directly =
   7993       dont_adapt_arguments || formal_parameter_count == arity;
   7994   if (can_invoke_directly) {
   7995     if (jsfun.is_identical_to(current_info()->closure())) {
   7996       graph()->MarkRecursive();
   7997     }
   7998     return NewPlainFunctionCall(target, argument_count);
   7999   } else {
   8000     HValue* param_count_value = Add<HConstant>(formal_parameter_count);
   8001     HValue* context = Add<HLoadNamedField>(
   8002         target, nullptr, HObjectAccess::ForFunctionContextPointer());
   8003     return NewArgumentAdaptorCall(target, context,
   8004         argument_count, param_count_value);
   8005   }
   8006   UNREACHABLE();
   8007   return NULL;
   8008 }
   8009 
   8010 
   8011 class FunctionSorter {
   8012  public:
   8013   explicit FunctionSorter(int index = 0, int ticks = 0, int size = 0)
   8014       : index_(index), ticks_(ticks), size_(size) {}
   8015 
   8016   int index() const { return index_; }
   8017   int ticks() const { return ticks_; }
   8018   int size() const { return size_; }
   8019 
   8020  private:
   8021   int index_;
   8022   int ticks_;
   8023   int size_;
   8024 };
   8025 
   8026 
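         // Order polymorphic call targets so that functions with more profiler ticks
         // come first and, among equally hot functions, smaller ones are preferred.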
   8027 inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) {
   8028   int diff = lhs.ticks() - rhs.ticks();
   8029   if (diff != 0) return diff > 0;
   8030   return lhs.size() < rhs.size();
   8031 }
   8032 
   8033 
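         // Compile a polymorphic method call such as `o.foo()` for which type
         // feedback recorded several receiver maps. For each map whose handler is a
         // constant JSFunction we emit a compare-and-branch (a map check, or a
         // string/number type check), try to inline the target or call it directly,
         // and merge all arms in a common join block. Receivers with unhandled maps
         // either reach a generic HCallFunction fallback or, when every recorded map
         // was covered, a hard deoptimization.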
   8034 void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
   8035                                                         HValue* receiver,
   8036                                                         SmallMapList* maps,
   8037                                                         Handle<String> name) {
   8038   int argument_count = expr->arguments()->length() + 1;  // Includes receiver.
   8039   FunctionSorter order[kMaxCallPolymorphism];
   8040 
   8041   bool handle_smi = false;
   8042   bool handled_string = false;
   8043   int ordered_functions = 0;
   8044 
   8045   int i;
   8046   for (i = 0; i < maps->length() && ordered_functions < kMaxCallPolymorphism;
   8047        ++i) {
   8048     PropertyAccessInfo info(this, LOAD, maps->at(i), name);
   8049     if (info.CanAccessMonomorphic() && info.IsDataConstant() &&
   8050         info.constant()->IsJSFunction()) {
   8051       if (info.IsStringType()) {
   8052         if (handled_string) continue;
   8053         handled_string = true;
   8054       }
   8055       Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
   8056       if (info.IsNumberType()) {
   8057         handle_smi = true;
   8058       }
   8059       expr->set_target(target);
   8060       order[ordered_functions++] = FunctionSorter(
   8061           i, target->shared()->profiler_ticks(), InliningAstSize(target));
   8062     }
   8063   }
   8064 
   8065   std::sort(order, order + ordered_functions);
   8066 
   8067   if (i < maps->length()) {
   8068     maps->Clear();
   8069     ordered_functions = -1;
   8070   }
   8071 
   8072   HBasicBlock* number_block = NULL;
   8073   HBasicBlock* join = NULL;
   8074   handled_string = false;
   8075   int count = 0;
   8076 
   8077   for (int fn = 0; fn < ordered_functions; ++fn) {
   8078     int i = order[fn].index();
   8079     PropertyAccessInfo info(this, LOAD, maps->at(i), name);
   8080     if (info.IsStringType()) {
   8081       if (handled_string) continue;
   8082       handled_string = true;
   8083     }
   8084     // Reloads the target.
   8085     info.CanAccessMonomorphic();
   8086     Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
   8087 
   8088     expr->set_target(target);
   8089     if (count == 0) {
   8090       // Only needed once.
   8091       join = graph()->CreateBasicBlock();
   8092       if (handle_smi) {
   8093         HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
   8094         HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
   8095         number_block = graph()->CreateBasicBlock();
   8096         FinishCurrentBlock(New<HIsSmiAndBranch>(
   8097                 receiver, empty_smi_block, not_smi_block));
   8098         GotoNoSimulate(empty_smi_block, number_block);
   8099         set_current_block(not_smi_block);
   8100       } else {
   8101         BuildCheckHeapObject(receiver);
   8102       }
   8103     }
   8104     ++count;
   8105     HBasicBlock* if_true = graph()->CreateBasicBlock();
   8106     HBasicBlock* if_false = graph()->CreateBasicBlock();
   8107     HUnaryControlInstruction* compare;
   8108 
   8109     Handle<Map> map = info.map();
   8110     if (info.IsNumberType()) {
   8111       Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
   8112       compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false);
   8113     } else if (info.IsStringType()) {
   8114       compare = New<HIsStringAndBranch>(receiver, if_true, if_false);
   8115     } else {
   8116       compare = New<HCompareMap>(receiver, map, if_true, if_false);
   8117     }
   8118     FinishCurrentBlock(compare);
   8119 
   8120     if (info.IsNumberType()) {
   8121       GotoNoSimulate(if_true, number_block);
   8122       if_true = number_block;
   8123     }
   8124 
   8125     set_current_block(if_true);
   8126 
   8127     AddCheckPrototypeMaps(info.holder(), map);
   8128 
   8129     HValue* function = Add<HConstant>(expr->target());
   8130     environment()->SetExpressionStackAt(0, function);
   8131     Push(receiver);
   8132     CHECK_ALIVE(VisitExpressions(expr->arguments()));
   8133     bool needs_wrapping = info.NeedsWrappingFor(target);
   8134     bool try_inline = FLAG_polymorphic_inlining && !needs_wrapping;
   8135     if (FLAG_trace_inlining && try_inline) {
   8136       Handle<JSFunction> caller = current_info()->closure();
   8137       base::SmartArrayPointer<char> caller_name =
   8138           caller->shared()->DebugName()->ToCString();
   8139       PrintF("Trying to inline the polymorphic call to %s from %s\n",
   8140              name->ToCString().get(),
   8141              caller_name.get());
   8142     }
   8143     if (try_inline && TryInlineCall(expr)) {
   8144       // Trying to inline will signal that we should bailout from the
   8145       // entire compilation by setting stack overflow on the visitor.
   8146       if (HasStackOverflow()) return;
   8147     } else {
   8148       // Since HWrapReceiver currently cannot actually wrap numbers and strings,
   8149       // use the regular CallFunctionStub for method calls to wrap the receiver.
   8150       // TODO(verwaest): Support creation of value wrappers directly in
   8151       // HWrapReceiver.
   8152       HInstruction* call =
   8153           needs_wrapping ? NewUncasted<HCallFunction>(
   8154                                function, argument_count,
   8155                                ConvertReceiverMode::kNotNullOrUndefined)
   8156                          : BuildCallConstantFunction(target, argument_count);
   8157       PushArgumentsFromEnvironment(argument_count);
   8158       AddInstruction(call);
   8159       Drop(1);  // Drop the function.
   8160       if (!ast_context()->IsEffect()) Push(call);
   8161     }
   8162 
   8163     if (current_block() != NULL) Goto(join);
   8164     set_current_block(if_false);
   8165   }
   8166 
   8167   // Finish up.  Unconditionally deoptimize if we've handled all the maps we
   8168   // know about and do not want to handle ones we've never seen.  Otherwise
   8169   // use a generic IC.
   8170   if (ordered_functions == maps->length() && FLAG_deoptimize_uncommon_cases) {
   8171     FinishExitWithHardDeoptimization(Deoptimizer::kUnknownMapInPolymorphicCall);
   8172   } else {
   8173     Property* prop = expr->expression()->AsProperty();
   8174     HInstruction* function =
   8175         BuildNamedGeneric(LOAD, prop, prop->PropertyFeedbackSlot(), receiver,
   8176                           name, NULL, prop->IsUninitialized());
   8177     AddInstruction(function);
   8178     Push(function);
   8179     AddSimulate(prop->LoadId(), REMOVABLE_SIMULATE);
   8180 
   8181     environment()->SetExpressionStackAt(1, function);
   8182     environment()->SetExpressionStackAt(0, receiver);
   8183     CHECK_ALIVE(VisitExpressions(expr->arguments()));
   8184 
   8185     HInstruction* call = New<HCallFunction>(
   8186         function, argument_count, ConvertReceiverMode::kNotNullOrUndefined);
   8187 
   8188     PushArgumentsFromEnvironment(argument_count);
   8189 
   8190     Drop(1);  // Function.
   8191 
   8192     if (join != NULL) {
   8193       AddInstruction(call);
   8194       if (!ast_context()->IsEffect()) Push(call);
   8195       Goto(join);
   8196     } else {
   8197       return ast_context()->ReturnInstruction(call, expr->id());
   8198     }
   8199   }
   8200 
   8201   // We assume that control flow is always live after an expression.  So
   8202   // even without predecessors to the join block, we set it as the exit
   8203   // block and continue by adding instructions there.
   8204   DCHECK(join != NULL);
   8205   if (join->HasPredecessor()) {
   8206     set_current_block(join);
   8207     join->SetJoinId(expr->id());
   8208     if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
   8209   } else {
   8210     set_current_block(NULL);
   8211   }
   8212 }
   8213 
   8214 
   8215 void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
   8216                                          Handle<JSFunction> caller,
   8217                                          const char* reason) {
   8218   if (FLAG_trace_inlining) {
   8219     base::SmartArrayPointer<char> target_name =
   8220         target->shared()->DebugName()->ToCString();
   8221     base::SmartArrayPointer<char> caller_name =
   8222         caller->shared()->DebugName()->ToCString();
   8223     if (reason == NULL) {
   8224       PrintF("Inlined %s called from %s.\n", target_name.get(),
   8225              caller_name.get());
   8226     } else {
   8227       PrintF("Did not inline %s called from %s (%s).\n",
   8228              target_name.get(), caller_name.get(), reason);
   8229     }
   8230   }
   8231 }
   8232 
   8233 
   8234 static const int kNotInlinable = 1000000000;
   8235 
   8236 
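         // Estimate the cost of inlining `target` as its AST node count, or return
         // kNotInlinable if inlining is ruled out up front (inlining disabled,
         // builtins, API functions, overly large source, or unsupported syntax).
         // Targets marked force_inline report a size of zero.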
   8237 int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
   8238   if (!FLAG_use_inlining) return kNotInlinable;
   8239 
   8240   // Precondition: call is monomorphic and we have found a target with the
   8241   // appropriate arity.
   8242   Handle<JSFunction> caller = current_info()->closure();
   8243   Handle<SharedFunctionInfo> target_shared(target->shared());
   8244 
   8245   // Always inline functions that force inlining.
   8246   if (target_shared->force_inline()) {
   8247     return 0;
   8248   }
   8249   if (target->shared()->IsBuiltin()) {
   8250     return kNotInlinable;
   8251   }
   8252 
   8253   if (target_shared->IsApiFunction()) {
   8254     TraceInline(target, caller, "target is api function");
   8255     return kNotInlinable;
   8256   }
   8257 
   8258   // Do a quick check on source code length to avoid parsing large
   8259   // inlining candidates.
   8260   if (target_shared->SourceSize() >
   8261       Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
   8262     TraceInline(target, caller, "target text too big");
   8263     return kNotInlinable;
   8264   }
   8265 
   8266   // Target must be inlineable.
   8267   BailoutReason noopt_reason = target_shared->disable_optimization_reason();
   8268   if (!target_shared->IsInlineable() && noopt_reason != kHydrogenFilter) {
   8269     TraceInline(target, caller, "target not inlineable");
   8270     return kNotInlinable;
   8271   }
   8272   if (noopt_reason != kNoReason && noopt_reason != kHydrogenFilter) {
   8273     TraceInline(target, caller, "target contains unsupported syntax [early]");
   8274     return kNotInlinable;
   8275   }
   8276 
   8277   int nodes_added = target_shared->ast_node_count();
   8278   return nodes_added;
   8279 }
   8280 
   8281 
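         // Try to inline a call to `target`. The first half performs the bailout
         // checks: same native context, AST size and inlining depth limits, no
         // recursion, the cumulative inlined-node budget, a successful parse and
         // analysis, and no context-allocated variables, rest parameters or other
         // unsupported features. Once those pass, the function is committed to
         // inlining: an inner environment and HEnterInlined are set up, the inlined
         // body is visited, and the returns are wired up to the surrounding call
         // context, so the remainder always returns true.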
   8282 bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
   8283                                        int arguments_count,
   8284                                        HValue* implicit_return_value,
   8285                                        BailoutId ast_id, BailoutId return_id,
   8286                                        InliningKind inlining_kind) {
   8287   if (target->context()->native_context() !=
   8288       top_info()->closure()->context()->native_context()) {
   8289     return false;
   8290   }
   8291   int nodes_added = InliningAstSize(target);
   8292   if (nodes_added == kNotInlinable) return false;
   8293 
   8294   Handle<JSFunction> caller = current_info()->closure();
   8295 
   8296   if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
   8297     TraceInline(target, caller, "target AST is too large [early]");
   8298     return false;
   8299   }
   8300 
   8301   // Don't inline deeper than the maximum number of inlining levels.
   8302   HEnvironment* env = environment();
   8303   int current_level = 1;
   8304   while (env->outer() != NULL) {
   8305     if (current_level == FLAG_max_inlining_levels) {
   8306       TraceInline(target, caller, "inline depth limit reached");
   8307       return false;
   8308     }
   8309     if (env->outer()->frame_type() == JS_FUNCTION) {
   8310       current_level++;
   8311     }
   8312     env = env->outer();
   8313   }
   8314 
   8315   // Don't inline recursive functions.
   8316   for (FunctionState* state = function_state();
   8317        state != NULL;
   8318        state = state->outer()) {
   8319     if (*state->compilation_info()->closure() == *target) {
   8320       TraceInline(target, caller, "target is recursive");
   8321       return false;
   8322     }
   8323   }
   8324 
   8325   // We don't want to add more than a certain number of nodes from inlining.
   8326   // Always inline small methods (<= 10 nodes).
   8327   if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
   8328                            kUnlimitedMaxInlinedNodesCumulative)) {
   8329     TraceInline(target, caller, "cumulative AST node limit reached");
   8330     return false;
   8331   }
   8332 
   8333   // Parse and allocate variables.
   8334   // Use the same AstValueFactory for creating strings in the sub-compilation
   8335   // step, but don't transfer ownership to target_info.
   8336   ParseInfo parse_info(zone(), target);
   8337   parse_info.set_ast_value_factory(
   8338       top_info()->parse_info()->ast_value_factory());
   8339   parse_info.set_ast_value_factory_owned(false);
   8340 
   8341   CompilationInfo target_info(&parse_info);
   8342   Handle<SharedFunctionInfo> target_shared(target->shared());
   8343 
   8344   if (IsClassConstructor(target_shared->kind())) {
   8345     TraceInline(target, caller, "target is classConstructor");
   8346     return false;
   8347   }
   8348   if (target_shared->HasDebugInfo()) {
   8349     TraceInline(target, caller, "target is being debugged");
   8350     return false;
   8351   }
   8352   if (!Compiler::ParseAndAnalyze(target_info.parse_info())) {
   8353     if (target_info.isolate()->has_pending_exception()) {
   8354       // Parse or scope error, never optimize this function.
   8355       SetStackOverflow();
   8356       target_shared->DisableOptimization(kParseScopeError);
   8357     }
   8358     TraceInline(target, caller, "parse failure");
   8359     return false;
   8360   }
   8361 
   8362   if (target_info.scope()->num_heap_slots() > 0) {
   8363     TraceInline(target, caller, "target has context-allocated variables");
   8364     return false;
   8365   }
   8366 
   8367   int rest_index;
   8368   Variable* rest = target_info.scope()->rest_parameter(&rest_index);
   8369   if (rest) {
   8370     TraceInline(target, caller, "target uses rest parameters");
   8371     return false;
   8372   }
   8373 
   8374   FunctionLiteral* function = target_info.literal();
   8375 
   8376   // The following conditions must be checked again after re-parsing, because
   8377   // earlier the information might not have been complete due to lazy parsing.
   8378   nodes_added = function->ast_node_count();
   8379   if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
   8380     TraceInline(target, caller, "target AST is too large [late]");
   8381     return false;
   8382   }
   8383   if (function->dont_optimize()) {
   8384     TraceInline(target, caller, "target contains unsupported syntax [late]");
   8385     return false;
   8386   }
   8387 
    8388   // If the function uses the arguments object, check that inlining of
    8389   // functions with an arguments object is enabled and that the arguments
    8390   // variable is stack-allocated.
   8391   if (function->scope()->arguments() != NULL) {
   8392     if (!FLAG_inline_arguments) {
   8393       TraceInline(target, caller, "target uses arguments object");
   8394       return false;
   8395     }
   8396   }
   8397 
   8398   // Unsupported variable references present.
   8399   if (function->scope()->this_function_var() != nullptr ||
   8400       function->scope()->new_target_var() != nullptr) {
   8401     TraceInline(target, caller, "target uses new target or this function");
   8402     return false;
   8403   }
   8404 
   8405   // All declarations must be inlineable.
   8406   ZoneList<Declaration*>* decls = target_info.scope()->declarations();
   8407   int decl_count = decls->length();
   8408   for (int i = 0; i < decl_count; ++i) {
   8409     if (!decls->at(i)->IsInlineable()) {
   8410       TraceInline(target, caller, "target has non-trivial declaration");
   8411       return false;
   8412     }
   8413   }
   8414 
   8415   // In strong mode it is an error to call a function with too few arguments.
   8416   // In that case do not inline because then the arity check would be skipped.
   8417   if (is_strong(function->language_mode()) &&
   8418       arguments_count < function->parameter_count()) {
   8419     TraceInline(target, caller,
   8420                 "too few arguments passed to a strong function");
   8421     return false;
   8422   }
   8423 
   8424   // Generate the deoptimization data for the unoptimized version of
   8425   // the target function if we don't already have it.
   8426   if (!Compiler::EnsureDeoptimizationSupport(&target_info)) {
   8427     TraceInline(target, caller, "could not generate deoptimization info");
   8428     return false;
   8429   }
   8430   // Remember that we inlined this function. This needs to be called right
   8431   // after the EnsureDeoptimizationSupport call so that the code flusher
   8432   // does not remove the code with the deoptimization support.
   8433   top_info()->AddInlinedFunction(target_info.shared_info());
   8434 
   8435   // ----------------------------------------------------------------
   8436   // After this point, we've made a decision to inline this function (so
   8437   // TryInline should always return true).
   8438 
   8439   // Type-check the inlined function.
   8440   DCHECK(target_shared->has_deoptimization_support());
   8441   AstTyper(target_info.isolate(), target_info.zone(), target_info.closure(),
   8442            target_info.scope(), target_info.osr_ast_id(), target_info.literal())
   8443       .Run();
   8444 
   8445   int inlining_id = 0;
   8446   if (top_info()->is_tracking_positions()) {
   8447     inlining_id = top_info()->TraceInlinedFunction(
   8448         target_shared, source_position(), function_state()->inlining_id());
   8449   }
   8450 
    8451   // Save the pending call context and set up a new one for the inlined
    8452   // function. The function state is allocated with new because we need to
    8453   // delete it in two different places.
   8454   FunctionState* target_state =
   8455       new FunctionState(this, &target_info, inlining_kind, inlining_id);
   8456 
   8457   HConstant* undefined = graph()->GetConstantUndefined();
   8458 
   8459   HEnvironment* inner_env =
   8460       environment()->CopyForInlining(target,
   8461                                      arguments_count,
   8462                                      function,
   8463                                      undefined,
   8464                                      function_state()->inlining_kind());
   8465 
   8466   HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
   8467   inner_env->BindContext(context);
   8468 
    8469   // Create a dematerialized arguments object for the function and copy the
    8470   // current argument values so they can be used for materialization.
   8471   HEnvironment* arguments_env = inner_env->arguments_environment();
   8472   int parameter_count = arguments_env->parameter_count();
   8473   HArgumentsObject* arguments_object = Add<HArgumentsObject>(parameter_count);
   8474   for (int i = 0; i < parameter_count; i++) {
   8475     arguments_object->AddArgument(arguments_env->Lookup(i), zone());
   8476   }
   8477 
    8478   // If the function uses the arguments object, bind it to the one above.
   8479   if (function->scope()->arguments() != NULL) {
   8480     DCHECK(function->scope()->arguments()->IsStackAllocated());
   8481     inner_env->Bind(function->scope()->arguments(), arguments_object);
   8482   }
   8483 
   8484   // Capture the state before invoking the inlined function for deopt in the
   8485   // inlined function. This simulate has no bailout-id since it's not directly
   8486   // reachable for deopt, and is only used to capture the state. If the simulate
   8487   // becomes reachable by merging, the ast id of the simulate merged into it is
   8488   // adopted.
   8489   Add<HSimulate>(BailoutId::None());
   8490 
   8491   current_block()->UpdateEnvironment(inner_env);
   8492   Scope* saved_scope = scope();
   8493   set_scope(target_info.scope());
   8494   HEnterInlined* enter_inlined =
   8495       Add<HEnterInlined>(return_id, target, context, arguments_count, function,
   8496                          function_state()->inlining_kind(),
   8497                          function->scope()->arguments(), arguments_object);
   8498   if (top_info()->is_tracking_positions()) {
   8499     enter_inlined->set_inlining_id(inlining_id);
   8500   }
   8501   function_state()->set_entry(enter_inlined);
   8502 
   8503   VisitDeclarations(target_info.scope()->declarations());
   8504   VisitStatements(function->body());
   8505   set_scope(saved_scope);
   8506   if (HasStackOverflow()) {
    8507     // Bail out if the inlined function did, since we cannot emit a call
    8508     // in its place, but do not disable optimization for the outer function.
   8509     TraceInline(target, caller, "inline graph construction failed");
   8510     target_shared->DisableOptimization(kInliningBailedOut);
   8511     current_info()->RetryOptimization(kInliningBailedOut);
   8512     delete target_state;
   8513     return true;
   8514   }
   8515 
   8516   // Update inlined nodes count.
   8517   inlined_count_ += nodes_added;
   8518 
   8519   Handle<Code> unoptimized_code(target_shared->code());
   8520   DCHECK(unoptimized_code->kind() == Code::FUNCTION);
   8521   Handle<TypeFeedbackInfo> type_info(
   8522       TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
   8523   graph()->update_type_change_checksum(type_info->own_type_change_checksum());
   8524 
   8525   TraceInline(target, caller, NULL);
   8526 
   8527   if (current_block() != NULL) {
   8528     FunctionState* state = function_state();
   8529     if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
    8530       // Falling off the end of an inlined construct call. In a test context
    8531       // the return value will always evaluate to true; in a value context it
    8532       // is the newly allocated receiver.
   8533       if (call_context()->IsTest()) {
   8534         Goto(inlined_test_context()->if_true(), state);
   8535       } else if (call_context()->IsEffect()) {
   8536         Goto(function_return(), state);
   8537       } else {
   8538         DCHECK(call_context()->IsValue());
   8539         AddLeaveInlined(implicit_return_value, state);
   8540       }
   8541     } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
    8542       // Falling off the end of an inlined setter call. The returned value is
    8543       // never used; the value of an assignment is always the value of the
    8544       // RHS of the assignment.
   8545       if (call_context()->IsTest()) {
   8546         inlined_test_context()->ReturnValue(implicit_return_value);
   8547       } else if (call_context()->IsEffect()) {
   8548         Goto(function_return(), state);
   8549       } else {
   8550         DCHECK(call_context()->IsValue());
   8551         AddLeaveInlined(implicit_return_value, state);
   8552       }
   8553     } else {
   8554       // Falling off the end of a normal inlined function. This basically means
   8555       // returning undefined.
   8556       if (call_context()->IsTest()) {
   8557         Goto(inlined_test_context()->if_false(), state);
   8558       } else if (call_context()->IsEffect()) {
   8559         Goto(function_return(), state);
   8560       } else {
   8561         DCHECK(call_context()->IsValue());
   8562         AddLeaveInlined(undefined, state);
   8563       }
   8564     }
   8565   }
   8566 
   8567   // Fix up the function exits.
   8568   if (inlined_test_context() != NULL) {
   8569     HBasicBlock* if_true = inlined_test_context()->if_true();
   8570     HBasicBlock* if_false = inlined_test_context()->if_false();
   8571 
   8572     HEnterInlined* entry = function_state()->entry();
   8573 
   8574     // Pop the return test context from the expression context stack.
   8575     DCHECK(ast_context() == inlined_test_context());
   8576     ClearInlinedTestContext();
   8577     delete target_state;
   8578 
   8579     // Forward to the real test context.
   8580     if (if_true->HasPredecessor()) {
   8581       entry->RegisterReturnTarget(if_true, zone());
   8582       if_true->SetJoinId(ast_id);
   8583       HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
   8584       Goto(if_true, true_target, function_state());
   8585     }
   8586     if (if_false->HasPredecessor()) {
   8587       entry->RegisterReturnTarget(if_false, zone());
   8588       if_false->SetJoinId(ast_id);
   8589       HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
   8590       Goto(if_false, false_target, function_state());
   8591     }
   8592     set_current_block(NULL);
   8593     return true;
   8594 
   8595   } else if (function_return()->HasPredecessor()) {
   8596     function_state()->entry()->RegisterReturnTarget(function_return(), zone());
   8597     function_return()->SetJoinId(ast_id);
   8598     set_current_block(function_return());
   8599   } else {
   8600     set_current_block(NULL);
   8601   }
   8602   delete target_state;
   8603   return true;
   8604 }
   8605 
   8606 
   8607 bool HOptimizedGraphBuilder::TryInlineCall(Call* expr) {
   8608   return TryInline(expr->target(), expr->arguments()->length(), NULL,
   8609                    expr->id(), expr->ReturnId(), NORMAL_RETURN);
   8610 }
   8611 
   8612 
   8613 bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
   8614                                                 HValue* implicit_return_value) {
   8615   return TryInline(expr->target(), expr->arguments()->length(),
   8616                    implicit_return_value, expr->id(), expr->ReturnId(),
   8617                    CONSTRUCT_CALL_RETURN);
   8618 }
   8619 
   8620 
   8621 bool HOptimizedGraphBuilder::TryInlineGetter(Handle<JSFunction> getter,
   8622                                              Handle<Map> receiver_map,
   8623                                              BailoutId ast_id,
   8624                                              BailoutId return_id) {
   8625   if (TryInlineApiGetter(getter, receiver_map, ast_id)) return true;
   8626   return TryInline(getter, 0, NULL, ast_id, return_id, GETTER_CALL_RETURN);
   8627 }
   8628 
   8629 
   8630 bool HOptimizedGraphBuilder::TryInlineSetter(Handle<JSFunction> setter,
   8631                                              Handle<Map> receiver_map,
   8632                                              BailoutId id,
   8633                                              BailoutId assignment_id,
   8634                                              HValue* implicit_return_value) {
   8635   if (TryInlineApiSetter(setter, receiver_map, id)) return true;
   8636   return TryInline(setter, 1, implicit_return_value, id, assignment_id,
   8637                    SETTER_CALL_RETURN);
   8638 }
   8639 
   8640 
   8641 bool HOptimizedGraphBuilder::TryInlineIndirectCall(Handle<JSFunction> function,
   8642                                                    Call* expr,
   8643                                                    int arguments_count) {
   8644   return TryInline(function, arguments_count, NULL, expr->id(),
   8645                    expr->ReturnId(), NORMAL_RETURN);
   8646 }
   8647 
   8648 
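         // Inline calls to recognized builtins when the call target itself is known.
         // Only a handful of Math builtins are handled here; e.g. a call that
         // resolves to Math.sqrt becomes a single HUnaryMathOperation, and Math.imul
         // becomes an integer multiply.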
   8649 bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr) {
   8650   if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
   8651   BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
   8652   switch (id) {
   8653     case kMathExp:
   8654       if (!FLAG_fast_math) break;
   8655       // Fall through if FLAG_fast_math.
   8656     case kMathRound:
   8657     case kMathFround:
   8658     case kMathFloor:
   8659     case kMathAbs:
   8660     case kMathSqrt:
   8661     case kMathLog:
   8662     case kMathClz32:
   8663       if (expr->arguments()->length() == 1) {
   8664         HValue* argument = Pop();
   8665         Drop(2);  // Receiver and function.
   8666         HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
   8667         ast_context()->ReturnInstruction(op, expr->id());
   8668         return true;
   8669       }
   8670       break;
   8671     case kMathImul:
   8672       if (expr->arguments()->length() == 2) {
   8673         HValue* right = Pop();
   8674         HValue* left = Pop();
   8675         Drop(2);  // Receiver and function.
   8676         HInstruction* op =
   8677             HMul::NewImul(isolate(), zone(), context(), left, right);
   8678         ast_context()->ReturnInstruction(op, expr->id());
   8679         return true;
   8680       }
   8681       break;
   8682     default:
   8683       // Not supported for inlining yet.
   8684       break;
   8685   }
   8686   return false;
   8687 }
   8688 
   8689 
   8690 // static
   8691 bool HOptimizedGraphBuilder::IsReadOnlyLengthDescriptor(
   8692     Handle<Map> jsarray_map) {
   8693   DCHECK(!jsarray_map->is_dictionary_map());
   8694   Isolate* isolate = jsarray_map->GetIsolate();
   8695   Handle<Name> length_string = isolate->factory()->length_string();
   8696   DescriptorArray* descriptors = jsarray_map->instance_descriptors();
   8697   int number = descriptors->SearchWithCache(*length_string, *jsarray_map);
   8698   DCHECK_NE(DescriptorArray::kNotFound, number);
   8699   return descriptors->GetDetails(number).IsReadOnly();
   8700 }
   8701 
   8702 
   8703 // static
   8704 bool HOptimizedGraphBuilder::CanInlineArrayResizeOperation(
   8705     Handle<Map> receiver_map) {
   8706   return !receiver_map.is_null() && receiver_map->prototype()->IsJSObject() &&
   8707          receiver_map->instance_type() == JS_ARRAY_TYPE &&
   8708          IsFastElementsKind(receiver_map->elements_kind()) &&
   8709          !receiver_map->is_dictionary_map() && !receiver_map->is_observed() &&
   8710          receiver_map->is_extensible() &&
   8711          (!receiver_map->is_prototype_map() || receiver_map->is_stable()) &&
   8712          !IsReadOnlyLengthDescriptor(receiver_map);
   8713 }
   8714 
   8715 
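         // Inline method-style calls of recognized builtins, using the receiver map
         // from type feedback (or from a constant receiver). The String char-code
         // builtins and the Math builtins become individual Hydrogen instructions,
         // while the Array pop/push/shift/indexOf/lastIndexOf builtins get
         // specialized fast paths guarded by map and prototype chain checks.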
   8716 bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
   8717     Call* expr, Handle<JSFunction> function, Handle<Map> receiver_map,
   8718     int args_count_no_receiver) {
   8719   if (!function->shared()->HasBuiltinFunctionId()) return false;
   8720   BuiltinFunctionId id = function->shared()->builtin_function_id();
   8721   int argument_count = args_count_no_receiver + 1;  // Plus receiver.
   8722 
   8723   if (receiver_map.is_null()) {
   8724     HValue* receiver = environment()->ExpressionStackAt(args_count_no_receiver);
   8725     if (receiver->IsConstant() &&
   8726         HConstant::cast(receiver)->handle(isolate())->IsHeapObject()) {
   8727       receiver_map =
   8728           handle(Handle<HeapObject>::cast(
   8729                      HConstant::cast(receiver)->handle(isolate()))->map());
   8730     }
   8731   }
   8732   // Try to inline calls like Math.* as operations in the calling function.
   8733   switch (id) {
   8734     case kStringCharCodeAt:
   8735     case kStringCharAt:
   8736       if (argument_count == 2) {
   8737         HValue* index = Pop();
   8738         HValue* string = Pop();
   8739         Drop(1);  // Function.
   8740         HInstruction* char_code =
   8741             BuildStringCharCodeAt(string, index);
   8742         if (id == kStringCharCodeAt) {
   8743           ast_context()->ReturnInstruction(char_code, expr->id());
   8744           return true;
   8745         }
   8746         AddInstruction(char_code);
   8747         HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
   8748         ast_context()->ReturnInstruction(result, expr->id());
   8749         return true;
   8750       }
   8751       break;
   8752     case kStringFromCharCode:
   8753       if (argument_count == 2) {
   8754         HValue* argument = Pop();
   8755         Drop(2);  // Receiver and function.
   8756         HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
   8757         ast_context()->ReturnInstruction(result, expr->id());
   8758         return true;
   8759       }
   8760       break;
   8761     case kMathExp:
   8762       if (!FLAG_fast_math) break;
   8763       // Fall through if FLAG_fast_math.
   8764     case kMathRound:
   8765     case kMathFround:
   8766     case kMathFloor:
   8767     case kMathAbs:
   8768     case kMathSqrt:
   8769     case kMathLog:
   8770     case kMathClz32:
   8771       if (argument_count == 2) {
   8772         HValue* argument = Pop();
   8773         Drop(2);  // Receiver and function.
   8774         HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
   8775         ast_context()->ReturnInstruction(op, expr->id());
   8776         return true;
   8777       }
   8778       break;
   8779     case kMathPow:
   8780       if (argument_count == 3) {
   8781         HValue* right = Pop();
   8782         HValue* left = Pop();
   8783         Drop(2);  // Receiver and function.
   8784         HInstruction* result = NULL;
   8785         // Use sqrt() if exponent is 0.5 or -0.5.
   8786         if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
   8787           double exponent = HConstant::cast(right)->DoubleValue();
   8788           if (exponent == 0.5) {
   8789             result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
   8790           } else if (exponent == -0.5) {
   8791             HValue* one = graph()->GetConstant1();
   8792             HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
   8793                 left, kMathPowHalf);
   8794             // MathPowHalf doesn't have side effects so there's no need for
   8795             // an environment simulation here.
   8796             DCHECK(!sqrt->HasObservableSideEffects());
   8797             result = NewUncasted<HDiv>(one, sqrt);
   8798           } else if (exponent == 2.0) {
   8799             result = NewUncasted<HMul>(left, left);
   8800           }
   8801         }
   8802 
   8803         if (result == NULL) {
   8804           result = NewUncasted<HPower>(left, right);
   8805         }
   8806         ast_context()->ReturnInstruction(result, expr->id());
   8807         return true;
   8808       }
   8809       break;
   8810     case kMathMax:
   8811     case kMathMin:
   8812       if (argument_count == 3) {
   8813         HValue* right = Pop();
   8814         HValue* left = Pop();
   8815         Drop(2);  // Receiver and function.
   8816         HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
   8817                                                      : HMathMinMax::kMathMax;
   8818         HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
   8819         ast_context()->ReturnInstruction(result, expr->id());
   8820         return true;
   8821       }
   8822       break;
   8823     case kMathImul:
   8824       if (argument_count == 3) {
   8825         HValue* right = Pop();
   8826         HValue* left = Pop();
   8827         Drop(2);  // Receiver and function.
   8828         HInstruction* result =
   8829             HMul::NewImul(isolate(), zone(), context(), left, right);
   8830         ast_context()->ReturnInstruction(result, expr->id());
   8831         return true;
   8832       }
   8833       break;
   8834     case kArrayPop: {
   8835       if (!CanInlineArrayResizeOperation(receiver_map)) return false;
   8836       ElementsKind elements_kind = receiver_map->elements_kind();
   8837 
   8838       Drop(args_count_no_receiver);
   8839       HValue* result;
   8840       HValue* reduced_length;
   8841       HValue* receiver = Pop();
   8842 
   8843       HValue* checked_object = AddCheckMap(receiver, receiver_map);
   8844       HValue* length =
   8845           Add<HLoadNamedField>(checked_object, nullptr,
   8846                                HObjectAccess::ForArrayLength(elements_kind));
   8847 
   8848       Drop(1);  // Function.
   8849 
   8850       { NoObservableSideEffectsScope scope(this);
   8851         IfBuilder length_checker(this);
   8852 
   8853         HValue* bounds_check = length_checker.If<HCompareNumericAndBranch>(
   8854             length, graph()->GetConstant0(), Token::EQ);
   8855         length_checker.Then();
   8856 
   8857         if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
   8858 
   8859         length_checker.Else();
   8860         HValue* elements = AddLoadElements(checked_object);
   8861         // Ensure that we aren't popping from a copy-on-write array.
   8862         if (IsFastSmiOrObjectElementsKind(elements_kind)) {
   8863           elements = BuildCopyElementsOnWrite(checked_object, elements,
   8864                                               elements_kind, length);
   8865         }
   8866         reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
   8867         result = AddElementAccess(elements, reduced_length, nullptr,
   8868                                   bounds_check, nullptr, elements_kind, LOAD);
   8869         HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
   8870                            ? graph()->GetConstantHole()
   8871                            : Add<HConstant>(HConstant::kHoleNaN);
   8872         if (IsFastSmiOrObjectElementsKind(elements_kind)) {
   8873           elements_kind = FAST_HOLEY_ELEMENTS;
   8874         }
   8875         AddElementAccess(elements, reduced_length, hole, bounds_check, nullptr,
   8876                          elements_kind, STORE);
   8877         Add<HStoreNamedField>(
   8878             checked_object, HObjectAccess::ForArrayLength(elements_kind),
   8879             reduced_length, STORE_TO_INITIALIZED_ENTRY);
   8880 
   8881         if (!ast_context()->IsEffect()) Push(result);
   8882 
   8883         length_checker.End();
   8884       }
   8885       result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
   8886       Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
   8887       if (!ast_context()->IsEffect()) Drop(1);
   8888 
   8889       ast_context()->ReturnValue(result);
   8890       return true;
   8891     }
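             // Fast path for Array.prototype.push with exactly one argument, e.g.
             // `a.push(x)`: after a map check and a prototype chain check, the value
             // is stored with a grow-and-store keyed access at index `a.length` and
             // the new length is returned.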
   8892     case kArrayPush: {
   8893       if (!CanInlineArrayResizeOperation(receiver_map)) return false;
   8894       ElementsKind elements_kind = receiver_map->elements_kind();
   8895 
   8896       // If there may be elements accessors in the prototype chain, the fast
   8897       // inlined version can't be used.
   8898       if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
   8899       // If there currently can be no elements accessors on the prototype chain,
   8900       // it doesn't mean that there won't be any later. Install a full prototype
   8901       // chain check to trap element accessors being installed on the prototype
   8902       // chain, which would cause elements to go to dictionary mode and result
   8903       // in a map change.
   8904       Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
   8905       BuildCheckPrototypeMaps(prototype, Handle<JSObject>());
   8906 
   8907       // Protect against adding elements to the Array prototype, which needs to
   8908       // route through appropriate bottlenecks.
   8909       if (isolate()->IsFastArrayConstructorPrototypeChainIntact() &&
   8910           !prototype->IsJSArray()) {
   8911         return false;
   8912       }
   8913 
   8914       const int argc = args_count_no_receiver;
   8915       if (argc != 1) return false;
   8916 
   8917       HValue* value_to_push = Pop();
   8918       HValue* array = Pop();
   8919       Drop(1);  // Drop function.
   8920 
   8921       HInstruction* new_size = NULL;
   8922       HValue* length = NULL;
   8923 
   8924       {
   8925         NoObservableSideEffectsScope scope(this);
   8926 
   8927         length = Add<HLoadNamedField>(
   8928             array, nullptr, HObjectAccess::ForArrayLength(elements_kind));
   8929 
   8930         new_size = AddUncasted<HAdd>(length, graph()->GetConstant1());
   8931 
   8932         bool is_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
   8933         HValue* checked_array = Add<HCheckMaps>(array, receiver_map);
   8934         BuildUncheckedMonomorphicElementAccess(
   8935             checked_array, length, value_to_push, is_array, elements_kind,
   8936             STORE, NEVER_RETURN_HOLE, STORE_AND_GROW_NO_TRANSITION);
   8937 
   8938         if (!ast_context()->IsEffect()) Push(new_size);
   8939         Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
   8940         if (!ast_context()->IsEffect()) Drop(1);
   8941       }
   8942 
   8943       ast_context()->ReturnValue(new_size);
   8944       return true;
   8945     }
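             // Fast path for Array.prototype.shift: for arrays no longer than the
             // inline threshold (16 elements) with a non-copy-on-write backing
             // store, the first element is remembered as the result, the remaining
             // elements are copied down one slot in a loop, a hole is written at the
             // old last index, and the length is decremented. Longer arrays fall
             // back to calling the shift builtin.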
   8946     case kArrayShift: {
   8947       if (!CanInlineArrayResizeOperation(receiver_map)) return false;
   8948       ElementsKind kind = receiver_map->elements_kind();
   8949 
   8950       // If there may be elements accessors in the prototype chain, the fast
   8951       // inlined version can't be used.
   8952       if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
   8953 
   8954       // If there currently can be no elements accessors on the prototype chain,
   8955       // it doesn't mean that there won't be any later. Install a full prototype
   8956       // chain check to trap element accessors being installed on the prototype
   8957       // chain, which would cause elements to go to dictionary mode and result
   8958       // in a map change.
   8959       BuildCheckPrototypeMaps(
   8960           handle(JSObject::cast(receiver_map->prototype()), isolate()),
   8961           Handle<JSObject>::null());
   8962 
   8963       // Threshold for fast inlined Array.shift().
   8964       HConstant* inline_threshold = Add<HConstant>(static_cast<int32_t>(16));
   8965 
   8966       Drop(args_count_no_receiver);
   8967       HValue* receiver = Pop();
   8968       HValue* function = Pop();
   8969       HValue* result;
   8970 
   8971       {
   8972         NoObservableSideEffectsScope scope(this);
   8973 
   8974         HValue* length = Add<HLoadNamedField>(
   8975             receiver, nullptr, HObjectAccess::ForArrayLength(kind));
   8976 
   8977         IfBuilder if_lengthiszero(this);
   8978         HValue* lengthiszero = if_lengthiszero.If<HCompareNumericAndBranch>(
   8979             length, graph()->GetConstant0(), Token::EQ);
   8980         if_lengthiszero.Then();
   8981         {
   8982           if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
   8983         }
   8984         if_lengthiszero.Else();
   8985         {
   8986           HValue* elements = AddLoadElements(receiver);
   8987 
   8988           // Check if we can use the fast inlined Array.shift().
   8989           IfBuilder if_inline(this);
   8990           if_inline.If<HCompareNumericAndBranch>(
   8991               length, inline_threshold, Token::LTE);
   8992           if (IsFastSmiOrObjectElementsKind(kind)) {
   8993             // We cannot handle copy-on-write backing stores here.
   8994             if_inline.AndIf<HCompareMap>(
   8995                 elements, isolate()->factory()->fixed_array_map());
   8996           }
   8997           if_inline.Then();
   8998           {
   8999             // Remember the result.
   9000             if (!ast_context()->IsEffect()) {
   9001               Push(AddElementAccess(elements, graph()->GetConstant0(), nullptr,
   9002                                     lengthiszero, nullptr, kind, LOAD));
   9003             }
   9004 
   9005             // Compute the new length.
   9006             HValue* new_length = AddUncasted<HSub>(
   9007                 length, graph()->GetConstant1());
   9008             new_length->ClearFlag(HValue::kCanOverflow);
   9009 
   9010             // Copy the remaining elements.
   9011             LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
   9012             {
   9013               HValue* new_key = loop.BeginBody(
   9014                   graph()->GetConstant0(), new_length, Token::LT);
   9015               HValue* key = AddUncasted<HAdd>(new_key, graph()->GetConstant1());
   9016               key->ClearFlag(HValue::kCanOverflow);
   9017               ElementsKind copy_kind =
   9018                   kind == FAST_HOLEY_SMI_ELEMENTS ? FAST_HOLEY_ELEMENTS : kind;
   9019               HValue* element =
   9020                   AddUncasted<HLoadKeyed>(elements, key, lengthiszero, nullptr,
   9021                                           copy_kind, ALLOW_RETURN_HOLE);
   9022               HStoreKeyed* store = Add<HStoreKeyed>(elements, new_key, element,
   9023                                                     nullptr, copy_kind);
   9024               store->SetFlag(HValue::kAllowUndefinedAsNaN);
   9025             }
   9026             loop.EndBody();
   9027 
   9028             // Put a hole at the end.
   9029             HValue* hole = IsFastSmiOrObjectElementsKind(kind)
   9030                                ? graph()->GetConstantHole()
   9031                                : Add<HConstant>(HConstant::kHoleNaN);
   9032             if (IsFastSmiOrObjectElementsKind(kind)) kind = FAST_HOLEY_ELEMENTS;
   9033             Add<HStoreKeyed>(elements, new_length, hole, nullptr, kind,
   9034                              INITIALIZING_STORE);
   9035 
    9036             // Remember the new length.
   9037             Add<HStoreNamedField>(
   9038                 receiver, HObjectAccess::ForArrayLength(kind),
   9039                 new_length, STORE_TO_INITIALIZED_ENTRY);
   9040           }
   9041           if_inline.Else();
   9042           {
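                     // Array is too long (or has a COW backing store): call the
                     // target shift function itself with just the receiver.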
   9043             Add<HPushArguments>(receiver);
   9044             result = Add<HCallJSFunction>(function, 1);
   9045             if (!ast_context()->IsEffect()) Push(result);
   9046           }
   9047           if_inline.End();
   9048         }
   9049         if_lengthiszero.End();
   9050       }
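               // In an effect context the value is never used, so any constant
               // serves as a placeholder result.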
   9051       result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
   9052       Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
   9053       if (!ast_context()->IsEffect()) Drop(1);
   9054       ast_context()->ReturnValue(result);
   9055       return true;
   9056     }
   9057     case kArrayIndexOf:
   9058     case kArrayLastIndexOf: {
   9059       if (receiver_map.is_null()) return false;
   9060       if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
   9061       ElementsKind kind = receiver_map->elements_kind();
   9062       if (!IsFastElementsKind(kind)) return false;
   9063       if (receiver_map->is_observed()) return false;
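               // argument_count counts the receiver as well, so only calls with a
               // single explicit argument (the search element, no fromIndex) are
               // inlined.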
   9064       if (argument_count != 2) return false;
   9065       if (!receiver_map->is_extensible()) return false;
   9066 
   9067       // If there may be elements accessors in the prototype chain, the fast
   9068       // inlined version can't be used.
   9069       if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
   9070 
   9071       // If there currently can be no elements accessors on the prototype chain,
   9072       // it doesn't mean that there won't be any later. Install a full prototype
   9073       // chain check to trap element accessors being installed on the prototype
   9074       // chain, which would cause elements to go to dictionary mode and result
   9075       // in a map change.
   9076       BuildCheckPrototypeMaps(
   9077           handle(JSObject::cast(receiver_map->prototype()), isolate()),
   9078           Handle<JSObject>::null());
   9079 
   9080       HValue* search_element = Pop();
   9081       HValue* receiver = Pop();
   9082       Drop(1);  // Drop function.
   9083 
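               // kFirstIndexOf scans forward (indexOf), kLastIndexOf scans
               // backward (lastIndexOf); BuildArrayIndexOf handles both modes.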
   9084       ArrayIndexOfMode mode = (id == kArrayIndexOf)
   9085           ? kFirstIndexOf : kLastIndexOf;
   9086       HValue* index = BuildArrayIndexOf(receiver, search_element, kind, mode);
   9087 
   9088       if (!ast_context()->IsEffect()) Push(index);
   9089       Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
   9090       if (!ast_context()->IsEffect()) Drop(1);
   9091       ast_context()->ReturnValue(index);
   9092       return true;
   9093     }
   9094     default:
   9095       // Not yet supported for inlining.
   9096       break;
   9097   }
   9098   return false;
   9099 }
   9100 
   9101 
   9102 bool HOptimizedGraphBuilder::TryInlineApiFunctionCall(Call* expr,
   9103                                                       HValue* receiver) {
   9104   Handle<JSFunction> function = expr->target();
   9105   int argc = expr->arguments()->length();
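           // A plain function call has no receiver map feedback, so an empty map
           // list is handed to TryInlineApiCall.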
   9106   SmallMapList receiver_maps;
   9107   return TryInlineApiCall(function,
   9108                           receiver,
   9109                           &receiver_maps,
   9110                           argc,
   9111                           expr->id(),
   9112                           kCallApiFunction);
   9113 }
   9114 
   9115 
   9116 bool HOptimizedGraphBuilder::TryInlineApiMethodCall(
   9117     Call* expr,
   9118     HValue* receiver,
   9119     SmallMapList* receiver_maps) {
   9120   Handle<JSFunction> function = expr->target();
   9121   int argc = expr->arguments()->length();
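           // Unlike the function-call case above, the caller-supplied receiver
           // maps are forwarded to TryInlineApiCall.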
   9122   return TryInlineApiCall(function,
   9123                           receiver,
   9124                           receiver_maps,
   9125                           argc,
   9126                           expr->id(),
   9127                           kCallApiMethod);
   9128 }
   9129 
   9130 
   9131 bool