Home | History | Annotate | Download | only in crankshaft
      1 // Copyright 2013 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/crankshaft/hydrogen.h"
      6 
      7 #include <memory>
      8 #include <sstream>
      9 
     10 #include "src/allocation-site-scopes.h"
     11 #include "src/ast/ast-numbering.h"
     12 #include "src/ast/compile-time-value.h"
     13 #include "src/ast/scopes.h"
     14 #include "src/code-factory.h"
     15 #include "src/crankshaft/hydrogen-bce.h"
     16 #include "src/crankshaft/hydrogen-canonicalize.h"
     17 #include "src/crankshaft/hydrogen-check-elimination.h"
     18 #include "src/crankshaft/hydrogen-dce.h"
     19 #include "src/crankshaft/hydrogen-dehoist.h"
     20 #include "src/crankshaft/hydrogen-environment-liveness.h"
     21 #include "src/crankshaft/hydrogen-escape-analysis.h"
     22 #include "src/crankshaft/hydrogen-gvn.h"
     23 #include "src/crankshaft/hydrogen-infer-representation.h"
     24 #include "src/crankshaft/hydrogen-infer-types.h"
     25 #include "src/crankshaft/hydrogen-load-elimination.h"
     26 #include "src/crankshaft/hydrogen-mark-unreachable.h"
     27 #include "src/crankshaft/hydrogen-osr.h"
     28 #include "src/crankshaft/hydrogen-range-analysis.h"
     29 #include "src/crankshaft/hydrogen-redundant-phi.h"
     30 #include "src/crankshaft/hydrogen-removable-simulates.h"
     31 #include "src/crankshaft/hydrogen-representation-changes.h"
     32 #include "src/crankshaft/hydrogen-sce.h"
     33 #include "src/crankshaft/hydrogen-store-elimination.h"
     34 #include "src/crankshaft/hydrogen-uint32-analysis.h"
     35 #include "src/crankshaft/lithium-allocator.h"
     36 #include "src/crankshaft/typing.h"
     37 #include "src/field-type.h"
     38 #include "src/full-codegen/full-codegen.h"
     39 #include "src/globals.h"
     40 #include "src/ic/call-optimization.h"
     41 #include "src/ic/ic.h"
     42 // GetRootConstructor
     43 #include "src/ic/ic-inl.h"
     44 #include "src/isolate-inl.h"
     45 #include "src/runtime/runtime.h"
     46 
     47 #if V8_TARGET_ARCH_IA32
     48 #include "src/crankshaft/ia32/lithium-codegen-ia32.h"  // NOLINT
     49 #elif V8_TARGET_ARCH_X64
     50 #include "src/crankshaft/x64/lithium-codegen-x64.h"  // NOLINT
     51 #elif V8_TARGET_ARCH_ARM64
     52 #include "src/crankshaft/arm64/lithium-codegen-arm64.h"  // NOLINT
     53 #elif V8_TARGET_ARCH_ARM
     54 #include "src/crankshaft/arm/lithium-codegen-arm.h"  // NOLINT
     55 #elif V8_TARGET_ARCH_PPC
     56 #include "src/crankshaft/ppc/lithium-codegen-ppc.h"  // NOLINT
     57 #elif V8_TARGET_ARCH_MIPS
     58 #include "src/crankshaft/mips/lithium-codegen-mips.h"  // NOLINT
     59 #elif V8_TARGET_ARCH_MIPS64
     60 #include "src/crankshaft/mips64/lithium-codegen-mips64.h"  // NOLINT
     61 #elif V8_TARGET_ARCH_S390
     62 #include "src/crankshaft/s390/lithium-codegen-s390.h"  // NOLINT
     63 #elif V8_TARGET_ARCH_X87
     64 #include "src/crankshaft/x87/lithium-codegen-x87.h"  // NOLINT
     65 #else
     66 #error Unsupported target architecture.
     67 #endif
     68 
     69 namespace v8 {
     70 namespace internal {
     71 
     72 const auto GetRegConfig = RegisterConfiguration::Crankshaft;
     73 
     74 class HOptimizedGraphBuilderWithPositions : public HOptimizedGraphBuilder {
     75  public:
     76   explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
     77       : HOptimizedGraphBuilder(info, true) {
     78     SetSourcePosition(info->shared_info()->start_position());
     79   }
     80 
     81 #define DEF_VISIT(type)                                      \
     82   void Visit##type(type* node) override {                    \
     83     SourcePosition old_position = SourcePosition::Unknown(); \
     84     if (node->position() != kNoSourcePosition) {             \
     85       old_position = source_position();                      \
     86       SetSourcePosition(node->position());                   \
     87     }                                                        \
     88     HOptimizedGraphBuilder::Visit##type(node);               \
     89     if (old_position.IsKnown()) {                            \
     90       set_source_position(old_position);                     \
     91     }                                                        \
     92   }
     93   EXPRESSION_NODE_LIST(DEF_VISIT)
     94 #undef DEF_VISIT
     95 
     96 #define DEF_VISIT(type)                                      \
     97   void Visit##type(type* node) override {                    \
     98     SourcePosition old_position = SourcePosition::Unknown(); \
     99     if (node->position() != kNoSourcePosition) {             \
    100       old_position = source_position();                      \
    101       SetSourcePosition(node->position());                   \
    102     }                                                        \
    103     HOptimizedGraphBuilder::Visit##type(node);               \
    104     if (old_position.IsKnown()) {                            \
    105       set_source_position(old_position);                     \
    106     }                                                        \
    107   }
    108   STATEMENT_NODE_LIST(DEF_VISIT)
    109 #undef DEF_VISIT
    110 
    111 #define DEF_VISIT(type)                        \
    112   void Visit##type(type* node) override {      \
    113     HOptimizedGraphBuilder::Visit##type(node); \
    114   }
    115   DECLARATION_NODE_LIST(DEF_VISIT)
    116 #undef DEF_VISIT
    117 };
    118 
// Sets up Crankshaft compilation: bails out for ineligible functions,
// ensures fullcode with deoptimization support exists, type-checks the AST,
// and builds the Hydrogen graph into graph_. Returns SUCCEEDED with graph_
// populated, FAILED, or an abort/retry status with a recorded reason.
HCompilationJob::Status HCompilationJob::PrepareJobImpl() {
  if (!isolate()->use_crankshaft() ||
      info()->shared_info()->dont_crankshaft()) {
    // Crankshaft is entirely disabled.
    return FAILED;
  }

  // Optimization requires a version of fullcode with deoptimization support.
  // Recompile the unoptimized version of the code if the current version
  // doesn't have deoptimization support already.
  // Otherwise, if we are gathering compilation time and space statistics
  // for hydrogen, gather baseline statistics for a fullcode compilation.
  bool should_recompile = !info()->shared_info()->has_deoptimization_support();
  if (should_recompile || FLAG_hydrogen_stats) {
    base::ElapsedTimer timer;
    if (FLAG_hydrogen_stats) {
      timer.Start();
    }
    if (!Compiler::EnsureDeoptimizationSupport(info())) {
      return FAILED;
    }
    if (FLAG_hydrogen_stats) {
      // Attribute the fullcode (re)compilation time to the baseline bucket.
      isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
    }
  }
  DCHECK(info()->shared_info()->has_deoptimization_support());
  DCHECK(!info()->shared_info()->never_compiled());

  // Check the whitelist for Crankshaft.
  if (!info()->shared_info()->PassesFilter(FLAG_hydrogen_filter)) {
    return AbortOptimization(kHydrogenFilter);
  }

  Scope* scope = info()->scope();
  if (LUnallocated::TooManyParameters(scope->num_parameters())) {
    // Crankshaft would require too many Lithium operands.
    return AbortOptimization(kTooManyParameters);
  }

  if (info()->is_osr() &&
      LUnallocated::TooManyParametersOrStackSlots(scope->num_parameters(),
                                                  scope->num_stack_slots())) {
    // Crankshaft would require too many Lithium operands.
    return AbortOptimization(kTooManyParametersLocals);
  }

  if (IsGeneratorFunction(info()->shared_info()->kind())) {
    // Crankshaft does not support generators.
    return AbortOptimization(kGenerator);
  }

  if (FLAG_trace_hydrogen) {
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  // Optimization could have been disabled by the parser. Note that this check
  // is only needed because the Hydrogen graph builder is missing some bailouts.
  if (info()->shared_info()->optimization_disabled()) {
    return AbortOptimization(
        info()->shared_info()->disable_optimization_reason());
  }

  // Source positions are only tracked when profiling or tracing asks for
  // them; otherwise use the cheaper position-less builder.
  HOptimizedGraphBuilder* graph_builder =
      (FLAG_hydrogen_track_positions || isolate()->is_profiling() ||
       FLAG_trace_ic)
          ? new (info()->zone()) HOptimizedGraphBuilderWithPositions(info())
          : new (info()->zone()) HOptimizedGraphBuilder(info(), false);

  // Type-check the function.
  AstTyper(info()->isolate(), info()->zone(), info()->closure(),
           info()->scope(), info()->osr_ast_id(), info()->literal(),
           graph_builder->bounds())
      .Run();

  graph_ = graph_builder->CreateGraph();

  if (isolate()->has_pending_exception()) {
    return FAILED;
  }

  if (graph_ == NULL) return FAILED;

  if (info()->dependencies()->HasAborted()) {
    // Dependency has changed during graph creation. Let's try again later.
    return RetryOptimization(kBailedOutDueToDependencyChange);
  }

  return SUCCEEDED;
}
    208 
    209 HCompilationJob::Status HCompilationJob::ExecuteJobImpl() {
    210   DCHECK(graph_ != NULL);
    211   BailoutReason bailout_reason = kNoReason;
    212 
    213   if (graph_->Optimize(&bailout_reason)) {
    214     chunk_ = LChunk::NewChunk(graph_);
    215     if (chunk_ != NULL) return SUCCEEDED;
    216   } else if (bailout_reason != kNoReason) {
    217     info()->AbortOptimization(bailout_reason);
    218   }
    219 
    220   return FAILED;
    221 }
    222 
    223 HCompilationJob::Status HCompilationJob::FinalizeJobImpl() {
    224   DCHECK(chunk_ != NULL);
    225   DCHECK(graph_ != NULL);
    226   {
    227     // Deferred handles reference objects that were accessible during
    228     // graph creation.  To make sure that we don't encounter inconsistencies
    229     // between graph creation and code generation, we disallow accessing
    230     // objects through deferred handles during the latter, with exceptions.
    231     DisallowDeferredHandleDereference no_deferred_handle_deref;
    232     Handle<Code> optimized_code = chunk_->Codegen();
    233     if (optimized_code.is_null()) {
    234       if (info()->bailout_reason() == kNoReason) {
    235         return AbortOptimization(kCodeGenerationFailed);
    236       }
    237       return FAILED;
    238     }
    239     RegisterWeakObjectsInOptimizedCode(optimized_code);
    240     info()->SetCode(optimized_code);
    241   }
    242   // Add to the weak list of optimized code objects.
    243   info()->context()->native_context()->AddOptimizedCode(*info()->code());
    244   return SUCCEEDED;
    245 }
    246 
// Creates an empty, unfinished basic block owned by |graph|. The block id is
// taken from the graph; all growable lists live in the graph's zone. Pointer
// members start out NULL and index members start at -1 ("not yet assigned").
HBasicBlock::HBasicBlock(HGraph* graph)
    : block_id_(graph->GetNextBlockID()),
      graph_(graph),
      phis_(4, graph->zone()),
      first_(NULL),
      last_(NULL),
      end_(NULL),
      loop_information_(NULL),
      predecessors_(2, graph->zone()),
      dominator_(NULL),
      dominated_blocks_(4, graph->zone()),
      last_environment_(NULL),
      argument_count_(-1),
      first_instruction_index_(-1),
      last_instruction_index_(-1),
      deleted_phis_(4, graph->zone()),
      parent_loop_header_(NULL),
      inlined_entry_block_(NULL),
      is_inline_return_target_(false),
      is_reachable_(true),
      dominates_loop_successors_(false),
      is_osr_entry_(false),
      is_ordered_(false) { }
    270 
    271 
// The isolate is reached through the owning graph.
Isolate* HBasicBlock::isolate() const {
  return graph_->isolate();
}
    275 
    276 
// Flags this block as unreachable (is_reachable() will return false).
void HBasicBlock::MarkUnreachable() {
  is_reachable_ = false;
}
    280 
    281 
// Turns this block into a loop header by allocating its HLoopInformation
// in the zone. Must not already be a loop header.
void HBasicBlock::AttachLoopInformation() {
  DCHECK(!IsLoopHeader());
  loop_information_ = new(zone()) HLoopInformation(this, zone());
}
    286 
    287 
// Demotes a loop header back to an ordinary block. The HLoopInformation is
// zone-allocated, so it is simply dropped rather than freed.
void HBasicBlock::DetachLoopInformation() {
  DCHECK(IsLoopHeader());
  loop_information_ = NULL;
}
    292 
    293 
// Appends |phi| to this block's phi list and points the phi back at this
// block. The graph's start block never holds phis.
void HBasicBlock::AddPhi(HPhi* phi) {
  DCHECK(!IsStartBlock());
  phis_.Add(phi, zone());
  phi->SetBlock(this);
}
    299 
    300 
// Kills |phi| and detaches it from this block; the phi must currently be
// owned by this block.
void HBasicBlock::RemovePhi(HPhi* phi) {
  DCHECK(phi->block() == this);
  DCHECK(phis_.Contains(phi));
  phi->Kill();
  phis_.RemoveElement(phi);
  phi->SetBlock(NULL);
}
    308 
    309 
// Appends |instr| to this (unfinished) block, tagging it with |position|
// when one is known. The first instruction added lazily creates the
// HBlockEntry marker, which inherits the same position.
void HBasicBlock::AddInstruction(HInstruction* instr, SourcePosition position) {
  DCHECK(!IsStartBlock() || !IsFinished());
  DCHECK(!instr->IsLinked());
  DCHECK(!IsFinished());

  if (position.IsKnown()) {
    instr->set_position(position);
  }
  if (first_ == NULL) {
    DCHECK(last_environment() != NULL);
    DCHECK(!last_environment()->ast_id().IsNone());
    // Lazily create the block-entry marker on first insertion.
    HBlockEntry* entry = new(zone()) HBlockEntry();
    entry->InitializeAsFirst(this);
    if (position.IsKnown()) {
      entry->set_position(position);
    } else {
      // When positions are tracked during optimization, only abnormal exits
      // are expected to arrive without one.
      DCHECK(!FLAG_hydrogen_track_positions ||
             !graph()->info()->IsOptimizing() || instr->IsAbnormalExit());
    }
    first_ = last_ = entry;
  }
  instr->InsertAfter(last_);
}
    333 
    334 
    335 HPhi* HBasicBlock::AddNewPhi(int merged_index) {
    336   if (graph()->IsInsideNoSideEffectsScope()) {
    337     merged_index = HPhi::kInvalidMergedIndex;
    338   }
    339   HPhi* phi = new(zone()) HPhi(merged_index, zone());
    340   AddPhi(phi);
    341   return phi;
    342 }
    343 
    344 
// Builds an HSimulate capturing the environment's pending state for
// deoptimization at |ast_id|: values pushed/popped since the last simulate
// and all variables assigned since then. Clears the environment's history
// afterwards so the next simulate starts fresh.
HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
                                       RemovableSimulate removable) {
  DCHECK(HasEnvironment());
  HEnvironment* environment = last_environment();
  DCHECK(ast_id.IsNone() ||
         ast_id == BailoutId::StubEntry() ||
         environment->closure()->shared()->VerifyBailoutId(ast_id));

  int push_count = environment->push_count();
  int pop_count = environment->pop_count();

  HSimulate* instr =
      new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
#ifdef DEBUG
  instr->set_closure(environment->closure());
#endif
  // Order of pushed values: newest (top of stack) first. This allows
  // HSimulate::MergeWith() to easily append additional pushed values
  // that are older (from further down the stack).
  for (int i = 0; i < push_count; ++i) {
    instr->AddPushedValue(environment->ExpressionStackAt(i));
  }
  for (GrowableBitVector::Iterator it(environment->assigned_variables(),
                                      zone());
       !it.Done();
       it.Advance()) {
    int index = it.Current();
    instr->AddAssignedValue(index, environment->Lookup(index));
  }
  environment->ClearHistory();
  return instr;
}
    377 
    378 
// Terminates the block with control instruction |end| and registers this
// block as a predecessor of each of |end|'s successors.
void HBasicBlock::Finish(HControlInstruction* end, SourcePosition position) {
  DCHECK(!IsFinished());
  AddInstruction(end, position);
  end_ = end;
  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    it.Current()->RegisterPredecessor(this);
  }
}
    387 
    388 
// Ends this block with an unconditional jump to |block|. When the target is
// an inline return target, the inlined frame is first torn down: an
// HLeaveInlined is emitted and the inlined part of the environment is
// discarded (|drop_extra| is set for normal-return inlinings). A simulate is
// inserted before the goto unless |add_simulate| is false.
void HBasicBlock::Goto(HBasicBlock* block, SourcePosition position,
                       FunctionState* state, bool add_simulate) {
  bool drop_extra = state != NULL &&
      state->inlining_kind() == NORMAL_RETURN;

  if (block->IsInlineReturnTarget()) {
    HEnvironment* env = last_environment();
    int argument_count = env->arguments_environment()->parameter_count();
    AddInstruction(new(zone())
                   HLeaveInlined(state->entry(), argument_count),
                   position);
    UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  }

  if (add_simulate) AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(block);
  Finish(instr, position);
}
    407 
    408 
// Returns |return_value| from an inlined function: emits HLeaveInlined,
// discards the inlined portion of the environment, pushes the return value,
// adds a simulate, and jumps to the inlining's return-target block.
void HBasicBlock::AddLeaveInlined(HValue* return_value, FunctionState* state,
                                  SourcePosition position) {
  HBasicBlock* target = state->function_return();
  bool drop_extra = state->inlining_kind() == NORMAL_RETURN;

  DCHECK(target->IsInlineReturnTarget());
  DCHECK(return_value != NULL);
  HEnvironment* env = last_environment();
  int argument_count = env->arguments_environment()->parameter_count();
  AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
                 position);
  UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  last_environment()->Push(return_value);
  AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(target);
  Finish(instr, position);
}
    426 
    427 
// Installs the first environment for this block; only valid while the block
// has no environment and no instructions yet.
void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
  DCHECK(!HasEnvironment());
  DCHECK(first() == NULL);
  UpdateEnvironment(env);
}
    433 
    434 
// Replaces the block's current environment and reports the environment's
// first expression index to the graph's maximum-environment-size tracker.
void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
  last_environment_ = env;
  graph()->update_maximum_environment_size(env->first_expression_index());
}
    439 
    440 
// Stamps |ast_id| onto every joining path: the HSimulate preceding each
// predecessor's final goto and each predecessor's environment, so all paths
// entering this join agree on the AST id.
void HBasicBlock::SetJoinId(BailoutId ast_id) {
  int length = predecessors_.length();
  DCHECK(length > 0);
  for (int i = 0; i < length; i++) {
    HBasicBlock* predecessor = predecessors_[i];
    DCHECK(predecessor->end()->IsGoto());
    HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
    DCHECK(i != 0 ||
           (predecessor->last_environment()->closure().is_null() ||
            predecessor->last_environment()->closure()->shared()
              ->VerifyBailoutId(ast_id)));
    simulate->set_ast_id(ast_id);
    predecessor->last_environment()->set_ast_id(ast_id);
  }
}
    456 
    457 
    458 bool HBasicBlock::Dominates(HBasicBlock* other) const {
    459   HBasicBlock* current = other->dominator();
    460   while (current != NULL) {
    461     if (current == this) return true;
    462     current = current->dominator();
    463   }
    464   return false;
    465 }
    466 
    467 
    468 bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
    469   if (this == other) return true;
    470   return Dominates(other);
    471 }
    472 
    473 
    474 int HBasicBlock::LoopNestingDepth() const {
    475   const HBasicBlock* current = this;
    476   int result  = (current->IsLoopHeader()) ? 1 : 0;
    477   while (current->parent_loop_header() != NULL) {
    478     current = current->parent_loop_header();
    479     result++;
    480   }
    481   return result;
    482 }
    483 
    484 
// Finalizes a loop header after the loop body has been built: assigns the
// loop's entry AST id to all joining paths and registers every predecessor
// except the first as a back edge. A header with a single predecessor has
// no back edges and is demoted to an ordinary block.
void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
  DCHECK(IsLoopHeader());

  SetJoinId(stmt->EntryId());
  if (predecessors()->length() == 1) {
    // This is a degenerate loop: no back edges ever materialized.
    DetachLoopInformation();
    return;
  }

  // Only the first entry into the loop is from outside the loop. All other
  // entries must be back edges.
  for (int i = 1; i < predecessors()->length(); ++i) {
    loop_information()->RegisterBackEdge(predecessors()->at(i));
  }
}
    501 
    502 
// Marks the |succ|-th successor of this (finished) block unreachable. The
// successor must have this block as its sole predecessor, so the block and
// the edge become unreachable together.
void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
  DCHECK(IsFinished());
  HBasicBlock* succ_block = end()->SuccessorAt(succ);

  DCHECK(succ_block->predecessors()->length() == 1);
  succ_block->MarkUnreachable();
}
    510 
    511 
// Records |pred| as a predecessor and wires up the environment: loop
// headers feed the incoming values into their existing phis; ordinary joins
// merge environments; the very first edge into an unfinished block simply
// copies the predecessor's environment.
void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
  if (HasPredecessor()) {
    // Only loop header blocks can have a predecessor added after
    // instructions have been added to the block (they have phis for all
    // values in the environment, these phis may be eliminated later).
    DCHECK(IsLoopHeader() || first_ == NULL);
    HEnvironment* incoming_env = pred->last_environment();
    if (IsLoopHeader()) {
      DCHECK_EQ(phis()->length(), incoming_env->length());
      for (int i = 0; i < phis_.length(); ++i) {
        phis_[i]->AddInput(incoming_env->values()->at(i));
      }
    } else {
      last_environment()->AddIncomingEdge(this, pred->last_environment());
    }
  } else if (!HasEnvironment() && !IsFinished()) {
    DCHECK(!IsLoopHeader());
    SetInitialEnvironment(pred->last_environment()->Copy());
  }

  predecessors_.Add(pred, zone());
}
    534 
    535 
    536 void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
    537   DCHECK(!dominated_blocks_.Contains(block));
    538   // Keep the list of dominated blocks sorted such that if there is two
    539   // succeeding block in this list, the predecessor is before the successor.
    540   int index = 0;
    541   while (index < dominated_blocks_.length() &&
    542          dominated_blocks_[index]->block_id() < block->block_id()) {
    543     ++index;
    544   }
    545   dominated_blocks_.InsertAt(index, block, zone());
    546 }
    547 
    548 
// Updates this block's dominator to the common dominator of its current
// dominator and |other|, found by walking both dominator chains upward
// (always advancing the one with the larger block id) until they meet.
// Keeps the dominator's dominated_blocks_ list in sync.
void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
  if (dominator_ == NULL) {
    // First candidate seen becomes the dominator outright.
    dominator_ = other;
    other->AddDominatedBlock(this);
  } else if (other->dominator() != NULL) {
    HBasicBlock* first = dominator_;
    HBasicBlock* second = other;

    while (first != second) {
      if (first->block_id() > second->block_id()) {
        first = first->dominator();
      } else {
        second = second->dominator();
      }
      DCHECK(first != NULL && second != NULL);
    }

    if (dominator_ != first) {
      // Move this block under the newly found common dominator.
      DCHECK(dominator_->dominated_blocks_.Contains(this));
      dominator_->dominated_blocks_.RemoveElement(this);
      dominator_ = first;
      first->AddDominatedBlock(this);
    }
  }
}
    574 
    575 
// Computes, for this loop header's loop, which blocks dominate all
// subsequent reachable blocks inside the loop (see comment below for the
// edge-counting argument). Called on loop headers only.
void HBasicBlock::AssignLoopSuccessorDominators() {
  // Mark blocks that dominate all subsequent reachable blocks inside their
  // loop. Exploit the fact that blocks are sorted in reverse post order. When
  // the loop is visited in increasing block id order, if the number of
  // non-loop-exiting successor edges at the dominator_candidate block doesn't
  // exceed the number of previously encountered predecessor edges, there is no
  // path from the loop header to any block with higher id that doesn't go
  // through the dominator_candidate block. In this case, the
  // dominator_candidate block is guaranteed to dominate all blocks reachable
  // from it with higher ids.
  HBasicBlock* last = loop_information()->GetLastBackEdge();
  int outstanding_successors = 1;  // one edge from the pre-header
  // Header always dominates everything.
  MarkAsLoopSuccessorDominator();
  for (int j = block_id(); j <= last->block_id(); ++j) {
    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
    for (HPredecessorIterator it(dominator_candidate); !it.Done();
         it.Advance()) {
      HBasicBlock* predecessor = it.Current();
      // Don't count back edges.
      if (predecessor->block_id() < dominator_candidate->block_id()) {
        outstanding_successors--;
      }
    }

    // If more successors than predecessors have been seen in the loop up to
    // now, it's not possible to guarantee that the current block dominates
    // all of the blocks with higher IDs. In this case, assume conservatively
    // that those paths through loop that don't go through the current block
    // contain all of the loop's dependencies. Also be careful to record
    // dominator information about the current loop that's being processed,
    // and not nested loops, which will be processed when
    // AssignLoopSuccessorDominators gets called on their header.
    DCHECK(outstanding_successors >= 0);
    HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
    if (outstanding_successors == 0 &&
        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
      dominator_candidate->MarkAsLoopSuccessorDominator();
    }
    HControlInstruction* end = dominator_candidate->end();
    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
      HBasicBlock* successor = it.Current();
      // Only count successors that remain inside the loop and don't loop back
      // to a loop header.
      if (successor->block_id() > dominator_candidate->block_id() &&
          successor->block_id() <= last->block_id()) {
        // Backwards edges must land on loop headers.
        // NOTE(review): the first disjunct repeats the enclosing if
        // condition, so this DCHECK can never fire; it presumably meant to
        // check the excluded (backward-edge) case — confirm intent.
        DCHECK(successor->block_id() > dominator_candidate->block_id() ||
               successor->IsLoopHeader());
        outstanding_successors++;
      }
    }
  }
}
    630 
    631 
    632 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
    633   for (int i = 0; i < predecessors_.length(); ++i) {
    634     if (predecessors_[i] == predecessor) return i;
    635   }
    636   UNREACHABLE();
    637   return -1;
    638 }
    639 
    640 
    641 #ifdef DEBUG
// Debug-only sanity checks for a single block: it must be finished, have a
// valid id, and its incoming edges must be in edge-split form.
void HBasicBlock::Verify() {
  // Check that every block is finished.
  DCHECK(IsFinished());
  DCHECK(block_id() >= 0);

  // Check that the incoming edges are in edge split form.
  // (With multiple predecessors, each predecessor must end in an
  // unconditional jump — no second successor.)
  if (predecessors_.length() > 1) {
    for (int i = 0; i < predecessors_.length(); ++i) {
      DCHECK(predecessors_[i]->end()->SecondSuccessor() == NULL);
    }
  }
}
    654 #endif
    655 
    656 
// Records |block| as a back edge of this loop and adds it to the loop's
// block set via AddBlock (which pulls in its in-loop predecessors).
void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
  this->back_edges_.Add(block, block->zone());
  AddBlock(block);
}
    661 
    662 
    663 HBasicBlock* HLoopInformation::GetLastBackEdge() const {
    664   int max_id = -1;
    665   HBasicBlock* result = NULL;
    666   for (int i = 0; i < back_edges_.length(); ++i) {
    667     HBasicBlock* cur = back_edges_[i];
    668     if (cur->block_id() > max_id) {
    669       max_id = cur->block_id();
    670       result = cur;
    671     }
    672   }
    673   return result;
    674 }
    675 
    676 
// Adds |block| to this loop's membership. The header itself and blocks
// already in the loop are skipped; a block that belongs to a nested loop is
// represented by adding that loop's header instead. Newly added blocks pull
// in their predecessors recursively.
void HLoopInformation::AddBlock(HBasicBlock* block) {
  if (block == loop_header()) return;
  if (block->parent_loop_header() == loop_header()) return;
  if (block->parent_loop_header() != NULL) {
    AddBlock(block->parent_loop_header());
  } else {
    block->set_parent_loop_header(loop_header());
    blocks_.Add(block, block->zone());
    for (int i = 0; i < block->predecessors()->length(); ++i) {
      AddBlock(block->predecessors()->at(i));
    }
  }
}
    690 
    691 
    692 #ifdef DEBUG
    693 
    694 // Checks reachability of the blocks in this graph and stores a bit in
    695 // the BitVector "reachable()" for every block that can be reached
    696 // from the start block of the graph. If "dont_visit" is non-null, the given
    697 // block is treated as if it would not be part of the graph. "visited_count()"
    698 // returns the number of reachable blocks.
    699 class ReachabilityAnalyzer BASE_EMBEDDED {
    700  public:
    701   ReachabilityAnalyzer(HBasicBlock* entry_block,
    702                        int block_count,
    703                        HBasicBlock* dont_visit)
    704       : visited_count_(0),
    705         stack_(16, entry_block->zone()),
    706         reachable_(block_count, entry_block->zone()),
    707         dont_visit_(dont_visit) {
    708     PushBlock(entry_block);
    709     Analyze();
    710   }
    711 
    712   int visited_count() const { return visited_count_; }
    713   const BitVector* reachable() const { return &reachable_; }
    714 
    715  private:
    716   void PushBlock(HBasicBlock* block) {
    717     if (block != NULL && block != dont_visit_ &&
    718         !reachable_.Contains(block->block_id())) {
    719       reachable_.Add(block->block_id());
    720       stack_.Add(block, block->zone());
    721       visited_count_++;
    722     }
    723   }
    724 
    725   void Analyze() {
    726     while (!stack_.is_empty()) {
    727       HControlInstruction* end = stack_.RemoveLast()->end();
    728       for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    729         PushBlock(it.Current());
    730       }
    731     }
    732   }
    733 
    734   int visited_count_;
    735   ZoneList<HBasicBlock*> stack_;
    736   BitVector reachable_;
    737   HBasicBlock* dont_visit_;
    738 };
    739 
    740 
// Consistency check over the whole graph: per-block instruction-list and CFG
// invariants, plus (with |do_full_verify|) reachability and dominator-tree
// properties.  Debug-only; the enclosing #ifdef opens before this chunk.
void HGraph::Verify(bool do_full_verify) const {
  // Keep the heap from relocating objects while handles are dereferenced
  // below without the usual handle-scope checks.
  Heap::RelocationLock relocation_lock(isolate()->heap());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  for (int i = 0; i < blocks_.length(); i++) {
    HBasicBlock* block = blocks_.at(i);

    block->Verify();

    // Check that every block contains at least one node and that only the last
    // node is a control instruction.
    HInstruction* current = block->first();
    DCHECK(current != NULL && current->IsBlockEntry());
    while (current != NULL) {
      DCHECK((current->next() == NULL) == current->IsControlInstruction());
      DCHECK(current->block() == block);
      current->Verify();
      current = current->next();
    }

    // Check that successors are correctly set.
    HBasicBlock* first = block->end()->FirstSuccessor();
    HBasicBlock* second = block->end()->SecondSuccessor();
    // A second successor implies a first one.
    DCHECK(second == NULL || first != NULL);

    // Check that the predecessor array is correct.
    if (first != NULL) {
      DCHECK(first->predecessors()->Contains(block));
      if (second != NULL) {
        DCHECK(second->predecessors()->Contains(block));
      }
    }

    // Check that phis have correct arguments.
    for (int j = 0; j < block->phis()->length(); j++) {
      HPhi* phi = block->phis()->at(j);
      phi->Verify();
    }

    // Check that all join blocks have predecessors that end with an
    // unconditional goto and agree on their environment node id.
    if (block->predecessors()->length() >= 2) {
      BailoutId id =
          block->predecessors()->first()->last_environment()->ast_id();
      for (int k = 0; k < block->predecessors()->length(); k++) {
        HBasicBlock* predecessor = block->predecessors()->at(k);
        DCHECK(predecessor->end()->IsGoto() ||
               predecessor->end()->IsDeoptimize());
        DCHECK(predecessor->last_environment()->ast_id() == id);
      }
    }
  }

  // Check special property of first block to have no predecessors.
  DCHECK(blocks_.at(0)->predecessors()->is_empty());

  if (do_full_verify) {
    // Check that the graph is fully connected.
    ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
    DCHECK(analyzer.visited_count() == blocks_.length());

    // Check that entry block dominator is NULL.
    DCHECK(entry_block_->dominator() == NULL);

    // Check dominators: a block must not be reachable if traversal is
    // forbidden from visiting its dominator.
    for (int i = 0; i < blocks_.length(); ++i) {
      HBasicBlock* block = blocks_.at(i);
      if (block->dominator() == NULL) {
        // Only start block may have no dominator assigned to.
        DCHECK(i == 0);
      } else {
        // Assert that block is unreachable if dominator must not be visited.
        ReachabilityAnalyzer dominator_analyzer(entry_block_,
                                                blocks_.length(),
                                                block->dominator());
        DCHECK(!dominator_analyzer.reachable()->Contains(block->block_id()));
      }
    }
  }
}
    821 
    822 #endif
    823 
    824 
    825 HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
    826                                int32_t value) {
    827   if (!pointer->is_set()) {
    828     // Can't pass GetInvalidContext() to HConstant::New, because that will
    829     // recursively call GetConstant
    830     HConstant* constant = HConstant::New(isolate(), zone(), NULL, value);
    831     constant->InsertAfter(entry_block()->first());
    832     pointer->set(constant);
    833     return constant;
    834   }
    835   return ReinsertConstantIfNecessary(pointer->get());
    836 }
    837 
    838 
    839 HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
    840   if (!constant->IsLinked()) {
    841     // The constant was removed from the graph. Reinsert.
    842     constant->ClearFlag(HValue::kIsDead);
    843     constant->InsertAfter(entry_block()->first());
    844   }
    845   return constant;
    846 }
    847 
    848 
    849 HConstant* HGraph::GetConstant0() {
    850   return GetConstant(&constant_0_, 0);
    851 }
    852 
    853 
    854 HConstant* HGraph::GetConstant1() {
    855   return GetConstant(&constant_1_, 1);
    856 }
    857 
    858 
    859 HConstant* HGraph::GetConstantMinus1() {
    860   return GetConstant(&constant_minus1_, -1);
    861 }
    862 
    863 
    864 HConstant* HGraph::GetConstantBool(bool value) {
    865   return value ? GetConstantTrue() : GetConstantFalse();
    866 }
    867 
// Defines HGraph::GetConstant<Name>() for the cached oddball singletons
// (undefined, true, false, the hole, null, optimized-out).  Each getter
// lazily wraps the corresponding immovable heap root in an HConstant,
// inserts it right after the entry block's first instruction, caches it in
// the SetOncePointer member, and re-inserts it if a prior pass removed it.
// NOTE: explanatory comments must stay outside the macro body -- a '//'
// inside would swallow the line-continuation backslash.
#define DEFINE_GET_CONSTANT(Name, name, constant, type, htype, boolean_value, \
                            undetectable)                                     \
  HConstant* HGraph::GetConstant##Name() {                                    \
    if (!constant_##name##_.is_set()) {                                       \
      HConstant* constant = new (zone()) HConstant(                           \
          Unique<Object>::CreateImmovable(isolate()->factory()->constant()),  \
          Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()),   \
          false, Representation::Tagged(), htype, true, boolean_value,        \
          undetectable, ODDBALL_TYPE);                                        \
      constant->InsertAfter(entry_block()->first());                          \
      constant_##name##_.set(constant);                                       \
    }                                                                         \
    return ReinsertConstantIfNecessary(constant_##name##_.get());             \
  }

// Arguments: Name, member name, factory accessor, map, HType, ToBoolean
// value, undetectable flag.
DEFINE_GET_CONSTANT(Undefined, undefined, undefined_value, undefined,
                    HType::Undefined(), false, true)
DEFINE_GET_CONSTANT(True, true, true_value, boolean, HType::Boolean(), true,
                    false)
DEFINE_GET_CONSTANT(False, false, false_value, boolean, HType::Boolean(), false,
                    false)
DEFINE_GET_CONSTANT(Hole, the_hole, the_hole_value, the_hole, HType::None(),
                    false, false)
DEFINE_GET_CONSTANT(Null, null, null_value, null, HType::Null(), false, true)
DEFINE_GET_CONSTANT(OptimizedOut, optimized_out, optimized_out, optimized_out,
                    HType::None(), false, false)

#undef DEFINE_GET_CONSTANT
    896 
// Defines HGraph::IsConstant<Name>(): true iff |constant| is exactly the
// cached singleton of that name.  Never materializes the singleton itself
// (an unset cache simply answers false).
#define DEFINE_IS_CONSTANT(Name, name)                                         \
bool HGraph::IsConstant##Name(HConstant* constant) {                           \
  return constant_##name##_.is_set() && constant == constant_##name##_.get();  \
}
DEFINE_IS_CONSTANT(Undefined, undefined)
DEFINE_IS_CONSTANT(0, 0)
DEFINE_IS_CONSTANT(1, 1)
DEFINE_IS_CONSTANT(Minus1, minus1)
DEFINE_IS_CONSTANT(True, true)
DEFINE_IS_CONSTANT(False, false)
DEFINE_IS_CONSTANT(Hole, the_hole)
DEFINE_IS_CONSTANT(Null, null)

#undef DEFINE_IS_CONSTANT
    911 
    912 
// Returns a constant standing in for "no valid context".
// NOTE(review): 0xFFFFC0C7 appears to be a recognizable sentinel pattern
// ("C0C7" ~ context) rather than a meaningful address -- confirm against the
// consumers of this value.
HConstant* HGraph::GetInvalidContext() {
  return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
}
    916 
    917 
    918 bool HGraph::IsStandardConstant(HConstant* constant) {
    919   if (IsConstantUndefined(constant)) return true;
    920   if (IsConstant0(constant)) return true;
    921   if (IsConstant1(constant)) return true;
    922   if (IsConstantMinus1(constant)) return true;
    923   if (IsConstantTrue(constant)) return true;
    924   if (IsConstantFalse(constant)) return true;
    925   if (IsConstantHole(constant)) return true;
    926   if (IsConstantNull(constant)) return true;
    927   return false;
    928 }
    929 
    930 
// Default constructor: the builder must be supplied later via Initialize().
HGraphBuilder::IfBuilder::IfBuilder() : builder_(NULL), needs_compare_(true) {}


// Fresh if: allocates new true/false target blocks and expects a compare to
// be added before Then().
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
    : needs_compare_(true) {
  Initialize(builder);
}


// Resumes a previously captured continuation: adopts the continuation's
// true/false blocks, so no compare is needed.
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder,
                                    HIfContinuation* continuation)
    : needs_compare_(false), first_true_block_(NULL), first_false_block_(NULL) {
  InitializeDontCreateBlocks(builder);
  continuation->Continue(&first_true_block_, &first_false_block_);
}
    946 
    947 
// Resets all builder-state flags and merge bookkeeping without allocating
// the true/false target blocks (the continuation variant supplies its own).
void HGraphBuilder::IfBuilder::InitializeDontCreateBlocks(
    HGraphBuilder* builder) {
  builder_ = builder;
  finished_ = false;
  did_then_ = false;
  did_else_ = false;
  did_else_if_ = false;
  did_and_ = false;
  did_or_ = false;
  captured_ = false;
  pending_merge_block_ = false;
  split_edge_merge_block_ = NULL;
  merge_at_join_blocks_ = NULL;
  normal_merge_at_join_block_count_ = 0;
  deopt_merge_at_join_block_count_ = 0;
}
    964 
    965 
    966 void HGraphBuilder::IfBuilder::Initialize(HGraphBuilder* builder) {
    967   InitializeDontCreateBlocks(builder);
    968   HEnvironment* env = builder->environment();
    969   first_true_block_ = builder->CreateBasicBlock(env->Copy());
    970   first_false_block_ = builder->CreateBasicBlock(env->Copy());
    971 }
    972 
    973 
// Wires |compare|'s two successor edges to this if's true/false blocks and
// finishes the builder's current block with it.  Also handles else-if chains
// (a compare added after Else()) and the split edges created by And()/Or().
HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
    HControlInstruction* compare) {
  DCHECK(did_then_ == did_else_);
  if (did_else_) {
    // Handle if-then-elseif: reset the per-condition state and allocate
    // fresh true/false blocks for the new condition.
    did_else_if_ = true;
    did_else_ = false;
    did_then_ = false;
    did_and_ = false;
    did_or_ = false;
    pending_merge_block_ = false;
    split_edge_merge_block_ = NULL;
    HEnvironment* env = builder()->environment();
    first_true_block_ = builder()->CreateBasicBlock(env->Copy());
    first_false_block_ = builder()->CreateBasicBlock(env->Copy());
  }
  if (split_edge_merge_block_ != NULL) {
    // Inside an And()/Or() chain: route the short-circuit edge through the
    // shared split-edge merge block.
    HEnvironment* env = first_false_block_->last_environment();
    HBasicBlock* split_edge = builder()->CreateBasicBlock(env->Copy());
    if (did_or_) {
      compare->SetSuccessorAt(0, split_edge);
      compare->SetSuccessorAt(1, first_false_block_);
    } else {
      compare->SetSuccessorAt(0, first_true_block_);
      compare->SetSuccessorAt(1, split_edge);
    }
    builder()->GotoNoSimulate(split_edge, split_edge_merge_block_);
  } else {
    compare->SetSuccessorAt(0, first_true_block_);
    compare->SetSuccessorAt(1, first_false_block_);
  }
  builder()->FinishCurrentBlock(compare);
  needs_compare_ = false;
  return compare;
}
   1009 
   1010 
// Chains the previous compare with the next one using logical OR: on the
// first Or() the true edge is rerouted through a shared split-edge merge
// block; the false path becomes the entry point of the next condition.
void HGraphBuilder::IfBuilder::Or() {
  DCHECK(!needs_compare_);
  DCHECK(!did_and_);  // And() and Or() cannot be mixed in one condition.
  did_or_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
    builder()->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
    first_true_block_ = split_edge_merge_block_;
  }
  // Continue building the next condition on the (old) false path.
  builder()->set_current_block(first_false_block_);
  first_false_block_ = builder()->CreateBasicBlock(env->Copy());
}
   1024 
   1025 
// Chains the previous compare with the next one using logical AND: on the
// first And() the false edge is rerouted through a shared split-edge merge
// block; the true path becomes the entry point of the next condition.
void HGraphBuilder::IfBuilder::And() {
  DCHECK(!needs_compare_);
  DCHECK(!did_or_);  // And() and Or() cannot be mixed in one condition.
  did_and_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
    builder()->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
    first_false_block_ = split_edge_merge_block_;
  }
  // Continue building the next condition on the (old) true path.
  builder()->set_current_block(first_true_block_);
  first_true_block_ = builder()->CreateBasicBlock(env->Copy());
}
   1039 
   1040 
// Hands this if's true/false result blocks to |continuation| so a later
// IfBuilder can resume them; leaves the builder with no current block.
void HGraphBuilder::IfBuilder::CaptureContinuation(
    HIfContinuation* continuation) {
  DCHECK(!did_else_if_);
  DCHECK(!finished_);
  DCHECK(!captured_);

  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  DCHECK(true_block != NULL);
  DCHECK(false_block != NULL);
  continuation->Capture(true_block, false_block);
  captured_ = true;
  builder()->set_current_block(NULL);
  End();
}
   1057 
   1058 
// Joins this if's arms onto an existing continuation's true/false branches
// instead of creating a merge block of its own.
void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
  DCHECK(!did_else_if_);
  DCHECK(!finished_);
  DCHECK(!captured_);
  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  merge_at_join_blocks_ = NULL;
  // Only still-open arms are wired in; a finished arm already exited.
  if (true_block != NULL && !true_block->IsFinished()) {
    DCHECK(continuation->IsTrueReachable());
    builder()->GotoNoSimulate(true_block, continuation->true_branch());
  }
  if (false_block != NULL && !false_block->IsFinished()) {
    DCHECK(continuation->IsFalseReachable());
    builder()->GotoNoSimulate(false_block, continuation->false_branch());
  }
  // Mark as captured so End() does not build its own merge block.
  captured_ = true;
  End();
}
   1078 
   1079 
// Starts the "then" arm: finishes the condition (synthesizing a trivial one
// if none was added) and makes first_true_block_ the current block.
void HGraphBuilder::IfBuilder::Then() {
  DCHECK(!captured_);
  DCHECK(!finished_);
  did_then_ = true;
  if (needs_compare_) {
    // Handle if's without any expressions, they jump directly to the "else"
    // branch. However, we must pretend that the "then" branch is reachable,
    // so that the graph builder visits it and sees any live range extending
    // constructs within it.
    HConstant* constant_false = builder()->graph()->GetConstantFalse();
    ToBooleanHints boolean_type = ToBooleanHint::kBoolean;
    HBranch* branch = builder()->New<HBranch>(
        constant_false, boolean_type, first_true_block_, first_false_block_);
    builder()->FinishCurrentBlock(branch);
  }
  builder()->set_current_block(first_true_block_);
  pending_merge_block_ = true;
}
   1098 
   1099 
// Ends the "then" arm (recording it for the eventual join) and starts the
// "else" arm on first_false_block_.
void HGraphBuilder::IfBuilder::Else() {
  DCHECK(did_then_);
  DCHECK(!captured_);
  DCHECK(!finished_);
  AddMergeAtJoinBlock(false);
  builder()->set_current_block(first_false_block_);
  pending_merge_block_ = true;
  did_else_ = true;
}
   1109 
// Ends the current arm with an eager deoptimization; the arm is recorded as
// a deopt merge so End() terminates it with an abnormal exit.
void HGraphBuilder::IfBuilder::Deopt(DeoptimizeReason reason) {
  DCHECK(did_then_);
  builder()->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  AddMergeAtJoinBlock(true);
}
   1115 
   1116 
   1117 void HGraphBuilder::IfBuilder::Return(HValue* value) {
   1118   HValue* parameter_count = builder()->graph()->GetConstantMinus1();
   1119   builder()->FinishExitCurrentBlock(
   1120       builder()->New<HReturn>(value, parameter_count));
   1121   AddMergeAtJoinBlock(false);
   1122 }
   1123 
   1124 
   1125 void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
   1126   if (!pending_merge_block_) return;
   1127   HBasicBlock* block = builder()->current_block();
   1128   DCHECK(block == NULL || !block->IsFinished());
   1129   MergeAtJoinBlock* record = new (builder()->zone())
   1130       MergeAtJoinBlock(block, deopt, merge_at_join_blocks_);
   1131   merge_at_join_blocks_ = record;
   1132   if (block != NULL) {
   1133     DCHECK(block->end() == NULL);
   1134     if (deopt) {
   1135       normal_merge_at_join_block_count_++;
   1136     } else {
   1137       deopt_merge_at_join_block_count_++;
   1138     }
   1139   }
   1140   builder()->set_current_block(NULL);
   1141   pending_merge_block_ = false;
   1142 }
   1143 
   1144 
   1145 void HGraphBuilder::IfBuilder::Finish() {
   1146   DCHECK(!finished_);
   1147   if (!did_then_) {
   1148     Then();
   1149   }
   1150   AddMergeAtJoinBlock(false);
   1151   if (!did_else_) {
   1152     Else();
   1153     AddMergeAtJoinBlock(false);
   1154   }
   1155   finished_ = true;
   1156 }
   1157 
   1158 
   1159 void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
   1160                                       HBasicBlock** else_continuation) {
   1161   Finish();
   1162 
   1163   MergeAtJoinBlock* else_record = merge_at_join_blocks_;
   1164   if (else_continuation != NULL) {
   1165     *else_continuation = else_record->block_;
   1166   }
   1167   MergeAtJoinBlock* then_record = else_record->next_;
   1168   if (then_continuation != NULL) {
   1169     *then_continuation = then_record->block_;
   1170   }
   1171   DCHECK(then_record->next_ == NULL);
   1172 }
   1173 
   1174 
   1175 void HGraphBuilder::IfBuilder::EndUnreachable() {
   1176   if (captured_) return;
   1177   Finish();
   1178   builder()->set_current_block(nullptr);
   1179 }
   1180 
   1181 
// Ends the if: joins all recorded arms into a merge block (or, when only a
// single arm survives, simply continues in that arm) and terminates deopt
// arms with abnormal exits.
void HGraphBuilder::IfBuilder::End() {
  if (captured_) return;
  Finish();

  int total_merged_blocks = normal_merge_at_join_block_count_ +
    deopt_merge_at_join_block_count_;
  DCHECK(total_merged_blocks >= 1);
  HBasicBlock* merge_block =
      total_merged_blocks == 1 ? NULL : builder()->graph()->CreateBasicBlock();

  // Merge non-deopt blocks first to ensure environment has right size for
  // padding.
  MergeAtJoinBlock* current = merge_at_join_blocks_;
  while (current != NULL) {
    if (!current->deopt_ && current->block_ != NULL) {
      // If there is only one block that makes it through to the end of the
      // if, then just set it as the current block and continue rather then
      // creating an unnecessary merge block.
      if (total_merged_blocks == 1) {
        builder()->set_current_block(current->block_);
        return;
      }
      builder()->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }

  // Merge deopt blocks, padding when necessary.
  current = merge_at_join_blocks_;
  while (current != NULL) {
    if (current->deopt_ && current->block_ != NULL) {
      current->block_->FinishExit(
          HAbnormalExit::New(builder()->isolate(), builder()->zone(), NULL),
          SourcePosition::Unknown());
    }
    current = current->next_;
  }
  builder()->set_current_block(merge_block);
}
   1221 
   1222 
// Unconditional while(true)-style loop: no context, no induction variable.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder) {
  Initialize(builder, NULL, kWhileTrue, NULL);
}
   1226 
   1227 
// Counting loop stepping by the graph's constant 1 in |direction|.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
                                        LoopBuilder::Direction direction) {
  Initialize(builder, context, direction, builder->graph()->GetConstant1());
}
   1232 
   1233 
   1234 HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
   1235                                         LoopBuilder::Direction direction,
   1236                                         HValue* increment_amount) {
   1237   Initialize(builder, context, direction, increment_amount);
   1238   increment_amount_ = increment_amount;
   1239 }
   1240 
   1241 
// Shared constructor body: stores the loop parameters and allocates the
// loop-header block; body/exit blocks are created lazily by BeginBody().
void HGraphBuilder::LoopBuilder::Initialize(HGraphBuilder* builder,
                                            HValue* context,
                                            Direction direction,
                                            HValue* increment_amount) {
  builder_ = builder;
  context_ = context;
  direction_ = direction;
  increment_amount_ = increment_amount;

  finished_ = false;
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
}
   1257 
   1258 
// Builds the loop header for a counting loop: creates the induction phi
// starting at |initial|, branches on |phi token terminating| between body
// and exit, and returns the induction value visible inside the body (the
// phi itself, or phi+/-1 for pre-increment/decrement loops).
HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  DCHECK(direction_ != kWhileTrue);
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  // Push the phi's initial value so the header environment carries it.
  env->Push(initial);
  builder_->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  // Remove the phi from the expression stack
  body_env->Pop();
  exit_env->Pop();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);

  builder_->set_current_block(header_block_);
  env->Pop();
  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
          phi_, terminating, token, body_block_, exit_block_));

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    // Pre-inc/dec loops advance the induction variable at the top of the
    // body; the adjusted value is what the body observes.
    Isolate* isolate = builder_->isolate();
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(isolate, zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(isolate, zone(), context_, phi_, one);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}
   1299 
   1300 
// Begins the body of a while(true) loop: jumps to the header, continues
// building there, and drops |drop_count| values from the environment.
void HGraphBuilder::LoopBuilder::BeginBody(int drop_count) {
  DCHECK(direction_ == kWhileTrue);
  HEnvironment* env = builder_->environment();
  builder_->GotoNoSimulate(header_block_);
  builder_->set_current_block(header_block_);
  env->Drop(drop_count);
}
   1308 
   1309 
// Emits a break: jumps to a lazily created exit trampoline block and leaves
// the builder with no current block.
void HGraphBuilder::LoopBuilder::Break() {
  if (exit_trampoline_block_ == NULL) {
    // It's the first time we saw a break.
    if (direction_ == kWhileTrue) {
      HEnvironment* env = builder_->environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
    } else {
      // Counting loops route the normal exit through the trampoline too, so
      // break and loop-exit merge in one place.
      HEnvironment* env = exit_block_->last_environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
      builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
    }
  }

  builder_->GotoNoSimulate(exit_trampoline_block_);
  builder_->set_current_block(NULL);
}
   1326 
   1327 
// Closes the loop body: applies post-increment/decrement, wires the back
// edge into the header, and resumes building at the loop exit (trampoline
// if any break was seen, plain exit block otherwise).
void HGraphBuilder::LoopBuilder::EndBody() {
  DCHECK(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    Isolate* isolate = builder_->isolate();
    if (direction_ == kPostIncrement) {
      increment_ =
          HAdd::New(isolate, zone(), context_, phi_, increment_amount_);
    } else {
      increment_ =
          HSub::New(isolate, zone(), context_, phi_, increment_amount_);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  if (direction_ != kWhileTrue) {
    // Push the new increment value on the expression stack to merge into
    // the phi.
    builder_->environment()->Push(increment_);
  }
  HBasicBlock* last_block = builder_->current_block();
  builder_->GotoNoSimulate(last_block, header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  if (exit_trampoline_block_ != NULL) {
    builder_->set_current_block(exit_trampoline_block_);
  } else {
    builder_->set_current_block(exit_block_);
  }
  finished_ = true;
}
   1360 
   1361 
// Allocates the HGraph and drives graph building; returns NULL if building
// bails out.
HGraph* HGraphBuilder::CreateGraph() {
  DCHECK(!FLAG_minimal);
  graph_ = new (zone()) HGraph(info_, descriptor_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  if (!info_->IsStub() && is_tracking_positions()) {
    // Emit the outermost function's source trace (not inlined anywhere).
    TraceInlinedFunction(info_->shared_info(), SourcePosition::Unknown(),
                         SourcePosition::kNotInlined);
  }
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueness();
  return graph_;
}
   1376 
// Writes the function's escaped source text and an INLINE record to the
// code tracer, tagged with the optimization id and |inlining_id| so later
// position traces can be matched back to the source.
void HGraphBuilder::TraceInlinedFunction(Handle<SharedFunctionInfo> shared,
                                         SourcePosition position,
                                         int inlining_id) {
  DCHECK(is_tracking_positions());

  if (!shared->script()->IsUndefined(isolate())) {
    Handle<Script> script(Script::cast(shared->script()), isolate());

    if (FLAG_hydrogen_track_positions &&
        !script->source()->IsUndefined(isolate())) {
      CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
      Object* source_name = script->name();
      OFStream os(tracing_scope.file());
      os << "--- FUNCTION SOURCE (";
      if (source_name->IsString()) {
        os << String::cast(source_name)->ToCString().get() << ":";
      }
      os << shared->DebugName()->ToCString().get() << ") id{";
      os << info_->optimization_id() << "," << inlining_id << "} ---\n";
      {
        // Raw string access below must not be interrupted by GC.
        DisallowHeapAllocation no_allocation;
        int start = shared->start_position();
        int len = shared->end_position() - start;
        String::SubStringRange source(String::cast(script->source()), start,
                                      len);
        for (const auto& c : source) {
          os << AsReversiblyEscapedUC16(c);
        }
      }

      os << "\n--- END ---\n";
    }
  }

  if (FLAG_hydrogen_track_positions &&
      inlining_id != SourcePosition::kNotInlined) {
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "INLINE (" << shared->DebugName()->ToCString().get() << ") id{"
       << info_->optimization_id() << "," << inlining_id << "} AS "
       << inlining_id << " AT " << position.ScriptOffset() << std::endl;
  }
}
   1420 
// Appends |instr| to the current block at the current source position and
// returns it; inside a no-side-effects scope the instruction is marked as
// having no observable side effects.
HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  DCHECK(current_block() != NULL);
  DCHECK(!FLAG_hydrogen_track_positions || position_.IsKnown() ||
         !info_->IsOptimizing());
  current_block()->AddInstruction(instr, source_position());
  if (graph()->IsInsideNoSideEffectsScope()) {
    instr->SetFlag(HValue::kHasNoObservableSideEffects);
  }
  return instr;
}
   1431 
   1432 
// Terminates the current block with |last|; a return or abnormal exit
// leaves the builder with no current block.
void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
  DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
         position_.IsKnown());
  current_block()->Finish(last, source_position());
  if (last->IsReturn() || last->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}
   1441 
   1442 
// Like FinishCurrentBlock() but uses FinishExit() for blocks that leave the
// graph; a return or abnormal exit clears the current block.
void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
  DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
         position_.IsKnown());
  current_block()->FinishExit(instruction, source_position());
  if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}
   1451 
   1452 
// Emits a load/add/store sequence bumping the given native stats counter;
// no-op unless native code counters are enabled.
void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    HValue* reference = Add<HConstant>(ExternalReference(counter));
    HValue* old_value =
        Add<HLoadNamedField>(reference, nullptr, HObjectAccess::ForCounter());
    HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
                          new_value, STORE_TO_INITIALIZED_ENTRY);
  }
}
   1464 
   1465 
// Adds a simulate (deopt point) for bailout id |id| to the current block;
// not allowed inside a no-side-effects scope.
void HGraphBuilder::AddSimulate(BailoutId id,
                                RemovableSimulate removable) {
  DCHECK(current_block() != NULL);
  DCHECK(!graph()->IsInsideNoSideEffectsScope());
  current_block()->AddNewSimulate(id, source_position(), removable);
}
   1472 
   1473 
   1474 HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
   1475   HBasicBlock* b = graph()->CreateBasicBlock();
   1476   b->SetInitialEnvironment(env);
   1477   return b;
   1478 }
   1479 
   1480 
   1481 HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
   1482   HBasicBlock* header = graph()->CreateBasicBlock();
   1483   HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
   1484   header->SetInitialEnvironment(entry_env);
   1485   header->AttachLoopInformation();
   1486   return header;
   1487 }
   1488 
   1489 
// Loads |object|'s elements kind: map -> bit_field2 -> ElementsKindBits.
HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
  HValue* map = Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMap());

  HValue* bit_field2 =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
  return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
}
   1497 
   1498 
// Decodes the enum-cache length from |map|'s bit_field3.  The loads are
// wrapped in a no-side-effects scope so they need no simulate.
HValue* HGraphBuilder::BuildEnumLength(HValue* map) {
  NoObservableSideEffectsScope scope(this);
  HValue* bit_field3 =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField3());
  return BuildDecodeField<Map::EnumLengthBits>(bit_field3);
}
   1505 
   1506 
   1507 HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
   1508   if (obj->type().IsHeapObject()) return obj;
   1509   return Add<HCheckHeapObject>(obj);
   1510 }
   1511 
// Ends the current block with an unconditional eager deoptimization
// followed by an abnormal exit; the builder has no current block afterward.
void HGraphBuilder::FinishExitWithHardDeoptimization(DeoptimizeReason reason) {
  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  FinishExitCurrentBlock(New<HAbnormalExit>());
}
   1516 
   1517 
   1518 HValue* HGraphBuilder::BuildCheckString(HValue* string) {
   1519   if (!string->type().IsString()) {
   1520     DCHECK(!string->IsConstant() ||
   1521            !HConstant::cast(string)->HasStringValue());
   1522     BuildCheckHeapObject(string);
   1523     return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
   1524   }
   1525   return string;
   1526 }
   1527 
// Wraps a sloppy-mode receiver in a JSObject if needed.  Skips the wrap
// when |object| is already a JSObject, or when the callee is a known
// constant function that is strict or native (those see the raw receiver).
HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* checked) {
  if (object->type().IsJSObject()) return object;
  HValue* function = checked->ActualValue();
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    Handle<JSFunction> f = Handle<JSFunction>::cast(
        HConstant::cast(function)->handle(isolate()));
    SharedFunctionInfo* shared = f->shared();
    if (is_strict(shared->language_mode()) || shared->native()) return object;
  }
  return Add<HWrapReceiver>(object, checked);
}
   1540 
   1541 
   1542 HValue* HGraphBuilder::BuildCheckAndGrowElementsCapacity(
   1543     HValue* object, HValue* elements, ElementsKind kind, HValue* length,
   1544     HValue* capacity, HValue* key) {
   1545   HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
   1546   HValue* max_capacity = AddUncasted<HAdd>(capacity, max_gap);
   1547   Add<HBoundsCheck>(key, max_capacity);
   1548 
   1549   HValue* new_capacity = BuildNewElementsCapacity(key);
   1550   HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind, kind,
   1551                                                    length, new_capacity);
   1552   return new_elements;
   1553 }
   1554 
   1555 
// Handles a potentially out-of-bounds keyed store: when |key| is at/past
// |length|, the backing store is grown as needed and, for JS arrays, the
// length is bumped to key + 1; otherwise a plain bounds check is emitted.
// Both arms push the elements store to use onto the environment stack so
// the branch merges cleanly; the merged value is popped and returned.
HValue* HGraphBuilder::BuildCheckForCapacityGrow(
    HValue* object,
    HValue* elements,
    ElementsKind kind,
    HValue* length,
    HValue* key,
    bool is_js_array,
    PropertyAccessType access_type) {
  IfBuilder length_checker(this);

  // Holey kinds may store at any index >= length; packed kinds may only
  // append at exactly key == length (anything else would create a hole).
  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
  length_checker.If<HCompareNumericAndBranch>(key, length, token);

  length_checker.Then();

  HValue* current_capacity = AddLoadFixedArrayLength(elements);

  if (top_info()->IsStub()) {
    // In stubs, inline the capacity check and the grow logic.
    IfBuilder capacity_checker(this);
    capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
                                                  Token::GTE);
    capacity_checker.Then();
    HValue* new_elements = BuildCheckAndGrowElementsCapacity(
        object, elements, kind, length, current_capacity, key);
    environment()->Push(new_elements);
    capacity_checker.Else();
    environment()->Push(elements);
    capacity_checker.End();
  } else {
    // In optimized code, defer to a single instruction that grows (or
    // bails out) as necessary.
    HValue* result = Add<HMaybeGrowElements>(
        object, elements, key, current_capacity, is_js_array, kind);
    environment()->Push(result);
  }

  if (is_js_array) {
    // Storing at/past the length of a JS array extends it: length = key + 1.
    HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
    new_length->ClearFlag(HValue::kCanOverflow);

    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
                          new_length);
  }

  if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
    HValue* checked_elements = environment()->Top();

    // Write zero to ensure that the new element is initialized with some smi.
    Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), nullptr,
                     kind);
  }

  length_checker.Else();
  // In-bounds path: just check the key against the current length.
  Add<HBoundsCheck>(key, length);

  environment()->Push(elements);
  length_checker.End();

  return environment()->Pop();
}
   1614 
   1615 
   1616 HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
   1617                                                 HValue* elements,
   1618                                                 ElementsKind kind,
   1619                                                 HValue* length) {
   1620   Factory* factory = isolate()->factory();
   1621 
   1622   IfBuilder cow_checker(this);
   1623 
   1624   cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
   1625   cow_checker.Then();
   1626 
   1627   HValue* capacity = AddLoadFixedArrayLength(elements);
   1628 
   1629   HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
   1630                                                    kind, length, capacity);
   1631 
   1632   environment()->Push(new_elements);
   1633 
   1634   cow_checker.Else();
   1635 
   1636   environment()->Push(elements);
   1637 
   1638   cow_checker.End();
   1639 
   1640   return environment()->Pop();
   1641 }
   1642 
   1643 HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
   1644   int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
   1645   HValue* seed = Add<HConstant>(seed_value);
   1646   HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);
   1647 
   1648   // hash = ~hash + (hash << 15);
   1649   HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
   1650   HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
   1651                                            graph()->GetConstantMinus1());
   1652   hash = AddUncasted<HAdd>(shifted_hash, not_hash);
   1653 
   1654   // hash = hash ^ (hash >> 12);
   1655   shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
   1656   hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
   1657 
   1658   // hash = hash + (hash << 2);
   1659   shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
   1660   hash = AddUncasted<HAdd>(hash, shifted_hash);
   1661 
   1662   // hash = hash ^ (hash >> 4);
   1663   shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
   1664   hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
   1665 
   1666   // hash = hash * 2057;
   1667   hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
   1668   hash->ClearFlag(HValue::kCanOverflow);
   1669 
   1670   // hash = hash ^ (hash >> 16);
   1671   shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
   1672   return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
   1673 }
   1674 
// Loads receiver[key] from a SeededNumberDictionary backing store
// |elements| using the precomputed |hash|, inlining the open-addressed
// probe loop. Falls back to %KeyedGetProperty for absent keys and for
// entries whose property details require special handling. The loop state
// (probe position and probe count) is carried across iterations on the
// environment stack; the result is left on the stack and popped at the end.
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
                                                           HValue* elements,
                                                           HValue* key,
                                                           HValue* hash) {
  HValue* capacity =
      Add<HLoadKeyed>(elements, Add<HConstant>(NameDictionary::kCapacityIndex),
                      nullptr, nullptr, FAST_ELEMENTS);

  // capacity - 1 is used as a mask to bring probe positions into range
  // (assumes capacity is a power of two - TODO confirm against the
  // dictionary implementation).
  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
  mask->ChangeRepresentation(Representation::Integer32());
  mask->ClearFlag(HValue::kCanOverflow);

  // Probe sequence: entry += count, count += 1 on each miss.
  HValue* entry = hash;
  HValue* count = graph()->GetConstant1();
  Push(entry);
  Push(count);

  HIfContinuation return_or_loop_continuation(graph()->CreateBasicBlock(),
                                              graph()->CreateBasicBlock());
  HIfContinuation found_key_match_continuation(graph()->CreateBasicBlock(),
                                               graph()->CreateBasicBlock());
  LoopBuilder probe_loop(this);
  probe_loop.BeginBody(2);  // Drop entry, count from last environment to
                            // appease live range building without simulates.

  count = Pop();
  entry = Pop();
  entry = AddUncasted<HBitwise>(Token::BIT_AND, entry, mask);
  int entry_size = SeededNumberDictionary::kEntrySize;
  HValue* base_index = AddUncasted<HMul>(entry, Add<HConstant>(entry_size));
  base_index->ClearFlag(HValue::kCanOverflow);
  int start_offset = SeededNumberDictionary::kElementsStartIndex;
  HValue* key_index =
      AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset));
  key_index->ClearFlag(HValue::kCanOverflow);

  HValue* candidate_key =
      Add<HLoadKeyed>(elements, key_index, nullptr, nullptr, FAST_ELEMENTS);
  IfBuilder if_undefined(this);
  if_undefined.If<HCompareObjectEqAndBranch>(candidate_key,
                                             graph()->GetConstantUndefined());
  if_undefined.Then();
  {
    // element == undefined means "not found". Call the runtime.
    // TODO(jkummerow): walk the prototype chain instead.
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kKeyedGetProperty),
                           2));
  }
  if_undefined.Else();
  {
    IfBuilder if_match(this);
    if_match.If<HCompareObjectEqAndBranch>(candidate_key, key);
    if_match.Then();
    if_match.Else();

    // Update non-internalized string in the dictionary with internalized key?
    IfBuilder if_update_with_internalized(this);
    HValue* smi_check =
        if_update_with_internalized.IfNot<HIsSmiAndBranch>(candidate_key);
    if_update_with_internalized.And();
    HValue* map = AddLoadMap(candidate_key, smi_check);
    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
    HValue* not_internalized_bit = AddUncasted<HBitwise>(
        Token::BIT_AND, instance_type,
        Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
    if_update_with_internalized.If<HCompareNumericAndBranch>(
        not_internalized_bit, graph()->GetConstant0(), Token::NE);
    if_update_with_internalized.And();
    if_update_with_internalized.IfNot<HCompareObjectEqAndBranch>(
        candidate_key, graph()->GetConstantHole());
    if_update_with_internalized.AndIf<HStringCompareAndBranch>(candidate_key,
                                                               key, Token::EQ);
    if_update_with_internalized.Then();
    // Replace a key that is a non-internalized string by the equivalent
    // internalized string for faster further lookups.
    Add<HStoreKeyed>(elements, key_index, key, nullptr, FAST_ELEMENTS);
    if_update_with_internalized.Else();

    if_update_with_internalized.JoinContinuation(&found_key_match_continuation);
    if_match.JoinContinuation(&found_key_match_continuation);

    IfBuilder found_key_match(this, &found_key_match_continuation);
    found_key_match.Then();
    // Key at current probe matches. Relevant bits in the |details| field must
    // be zero, otherwise the dictionary element requires special handling.
    HValue* details_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 2));
    details_index->ClearFlag(HValue::kCanOverflow);
    HValue* details = Add<HLoadKeyed>(elements, details_index, nullptr, nullptr,
                                      FAST_ELEMENTS);
    int details_mask = PropertyDetails::TypeField::kMask;
    details = AddUncasted<HBitwise>(Token::BIT_AND, details,
                                    Add<HConstant>(details_mask));
    IfBuilder details_compare(this);
    details_compare.If<HCompareNumericAndBranch>(
        details, graph()->GetConstant0(), Token::EQ);
    details_compare.Then();
    // Plain data property: the value lives in the slot after the key.
    HValue* result_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 1));
    result_index->ClearFlag(HValue::kCanOverflow);
    Push(Add<HLoadKeyed>(elements, result_index, nullptr, nullptr,
                         FAST_ELEMENTS));
    details_compare.Else();
    // Special property (non-zero details): let the runtime handle it.
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kKeyedGetProperty),
                           2));
    details_compare.End();

    found_key_match.Else();
    found_key_match.JoinContinuation(&return_or_loop_continuation);
  }
  if_undefined.JoinContinuation(&return_or_loop_continuation);

  IfBuilder return_or_loop(this, &return_or_loop_continuation);
  return_or_loop.Then();
  probe_loop.Break();

  return_or_loop.Else();
  // No result yet: advance to the next probe position and loop.
  entry = AddUncasted<HAdd>(entry, count);
  entry->ClearFlag(HValue::kCanOverflow);
  count = AddUncasted<HAdd>(count, graph()->GetConstant1());
  count->ClearFlag(HValue::kCanOverflow);
  Push(entry);
  Push(count);

  probe_loop.EndBody();

  return_or_loop.End();

  return Pop();
}
   1808 
   1809 HValue* HGraphBuilder::BuildCreateIterResultObject(HValue* value,
   1810                                                    HValue* done) {
   1811   NoObservableSideEffectsScope scope(this);
   1812 
   1813   // Allocate the JSIteratorResult object.
   1814   HValue* result =
   1815       Add<HAllocate>(Add<HConstant>(JSIteratorResult::kSize), HType::JSObject(),
   1816                      NOT_TENURED, JS_OBJECT_TYPE, graph()->GetConstant0());
   1817 
   1818   // Initialize the JSIteratorResult object.
   1819   HValue* native_context = BuildGetNativeContext();
   1820   HValue* map = Add<HLoadNamedField>(
   1821       native_context, nullptr,
   1822       HObjectAccess::ForContextSlot(Context::ITERATOR_RESULT_MAP_INDEX));
   1823   Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
   1824   HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
   1825   Add<HStoreNamedField>(result, HObjectAccess::ForPropertiesPointer(),
   1826                         empty_fixed_array);
   1827   Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
   1828                         empty_fixed_array);
   1829   Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
   1830                                     JSIteratorResult::kValueOffset),
   1831                         value);
   1832   Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
   1833                                     JSIteratorResult::kDoneOffset),
   1834                         done);
   1835   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
   1836   return result;
   1837 }
   1838 
   1839 
// Converts a number |object| (statically typed as |type|) to a string by
// probing the number-string cache, falling back to the
// %NumberToStringSkipCache runtime call on a cache miss. Constant inputs
// are converted at compile time. The result is passed through the
// environment stack (Push/Pop) so the branch arms merge on one value.
HValue* HGraphBuilder::BuildNumberToString(HValue* object, AstType* type) {
  NoObservableSideEffectsScope scope(this);

  // Convert constant numbers at compile time.
  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
    Handle<Object> number = HConstant::cast(object)->handle(isolate());
    Handle<String> result = isolate()->factory()->NumberToString(number);
    return Add<HConstant>(result);
  }

  // Create a joinable continuation.
  HIfContinuation found(graph()->CreateBasicBlock(),
                        graph()->CreateBasicBlock());

  // Load the number string cache.
  HValue* number_string_cache =
      Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  HValue* mask = AddLoadFixedArrayLength(number_string_cache);
  mask->set_type(HType::Smi());
  mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
  mask = AddUncasted<HSub>(mask, graph()->GetConstant1());

  // Check whether object is a smi.
  IfBuilder if_objectissmi(this);
  if_objectissmi.If<HIsSmiAndBranch>(object);
  if_objectissmi.Then();
  {
    // Compute hash for smi similar to smi_get_hash().
    HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);

    // Load the key.
    HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
    HValue* key = Add<HLoadKeyed>(number_string_cache, key_index, nullptr,
                                  nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);

    // Check if object == key.
    IfBuilder if_objectiskey(this);
    if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
    if_objectiskey.Then();
    {
      // Make the key_index available.
      Push(key_index);
    }
    if_objectiskey.JoinContinuation(&found);
  }
  if_objectissmi.Else();
  {
    if (type->Is(AstType::SignedSmall())) {
      // Static type says smi, but the smi check failed: deoptimize.
      if_objectissmi.Deopt(DeoptimizeReason::kExpectedSmi);
    } else {
      // Check if the object is a heap number.
      IfBuilder if_objectisnumber(this);
      HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
          object, isolate()->factory()->heap_number_map());
      if_objectisnumber.Then();
      {
        // Compute hash for heap number similar to double_get_hash().
        HValue* low = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueLowestBits());
        HValue* high = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueHighestBits());
        HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
        hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);

        // Load the key.
        HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
        HValue* key =
            Add<HLoadKeyed>(number_string_cache, key_index, nullptr, nullptr,
                            FAST_ELEMENTS, ALLOW_RETURN_HOLE);

        // Check if the key is a heap number and compare it with the object.
        IfBuilder if_keyisnotsmi(this);
        HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
        if_keyisnotsmi.Then();
        {
          IfBuilder if_keyisheapnumber(this);
          if_keyisheapnumber.If<HCompareMap>(
              key, isolate()->factory()->heap_number_map());
          if_keyisheapnumber.Then();
          {
            // Check if values of key and object match.
            IfBuilder if_keyeqobject(this);
            if_keyeqobject.If<HCompareNumericAndBranch>(
                Add<HLoadNamedField>(key, keyisnotsmi,
                                     HObjectAccess::ForHeapNumberValue()),
                Add<HLoadNamedField>(object, objectisnumber,
                                     HObjectAccess::ForHeapNumberValue()),
                Token::EQ);
            if_keyeqobject.Then();
            {
              // Make the key_index available.
              Push(key_index);
            }
            if_keyeqobject.JoinContinuation(&found);
          }
          if_keyisheapnumber.JoinContinuation(&found);
        }
        if_keyisnotsmi.JoinContinuation(&found);
      }
      if_objectisnumber.Else();
      {
        if (type->Is(AstType::Number())) {
          // Static type says number, but it is neither smi nor heap number.
          if_objectisnumber.Deopt(DeoptimizeReason::kExpectedHeapNumber);
        }
      }
      if_objectisnumber.JoinContinuation(&found);
    }
  }
  if_objectissmi.JoinContinuation(&found);

  // Check for cache hit.
  IfBuilder if_found(this, &found);
  if_found.Then();
  {
    // Count number to string operation in native code.
    AddIncrementCounter(isolate()->counters()->number_to_string_native());

    // Load the value in case of cache hit.
    HValue* key_index = Pop();
    HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
    Push(Add<HLoadKeyed>(number_string_cache, value_index, nullptr, nullptr,
                         FAST_ELEMENTS, ALLOW_RETURN_HOLE));
  }
  if_found.Else();
  {
    // Cache miss, fallback to runtime.
    Add<HPushArguments>(object);
    Push(Add<HCallRuntime>(
            Runtime::FunctionForId(Runtime::kNumberToStringSkipCache),
            1));
  }
  if_found.End();

  return Pop();
}
   1980 
   1981 HValue* HGraphBuilder::BuildToNumber(HValue* input) {
   1982   if (input->type().IsTaggedNumber() ||
   1983       input->representation().IsSpecialization()) {
   1984     return input;
   1985   }
   1986   Callable callable = CodeFactory::ToNumber(isolate());
   1987   HValue* stub = Add<HConstant>(callable.code());
   1988   HValue* values[] = {input};
   1989   HCallWithDescriptor* instr = Add<HCallWithDescriptor>(
   1990       stub, 0, callable.descriptor(), ArrayVector(values));
   1991   instr->set_type(HType::TaggedNumber());
   1992   return instr;
   1993 }
   1994 
   1995 
// Implements ToObject(receiver): a receiver that is already a JSReceiver is
// returned unchanged, while primitives are wrapped in a freshly allocated
// JSValue whose map is derived from the appropriate global constructor
// (Number for smis, otherwise via the receiver map's constructor function
// index). Deoptimizes for undefined/null, which have no constructor. The
// constructor index and the result are communicated between the branch arms
// via the environment stack (Push/Pop).
HValue* HGraphBuilder::BuildToObject(HValue* receiver) {
  NoObservableSideEffectsScope scope(this);

  // Create a joinable continuation.
  HIfContinuation wrap(graph()->CreateBasicBlock(),
                       graph()->CreateBasicBlock());

  // Determine the proper global constructor function required to wrap
  // {receiver} into a JSValue, unless {receiver} is already a {JSReceiver}, in
  // which case we just return it.  Deopts to Runtime::kToObject if {receiver}
  // is undefined or null.
  IfBuilder receiver_is_smi(this);
  receiver_is_smi.If<HIsSmiAndBranch>(receiver);
  receiver_is_smi.Then();
  {
    // Use global Number function.
    Push(Add<HConstant>(Context::NUMBER_FUNCTION_INDEX));
  }
  receiver_is_smi.Else();
  {
    // Determine {receiver} map and instance type.
    HValue* receiver_map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
    HValue* receiver_instance_type = Add<HLoadNamedField>(
        receiver_map, nullptr, HObjectAccess::ForMapInstanceType());

    // First check whether {receiver} is already a spec object (fast case).
    IfBuilder receiver_is_not_spec_object(this);
    receiver_is_not_spec_object.If<HCompareNumericAndBranch>(
        receiver_instance_type, Add<HConstant>(FIRST_JS_RECEIVER_TYPE),
        Token::LT);
    receiver_is_not_spec_object.Then();
    {
      // Load the constructor function index from the {receiver} map.
      HValue* constructor_function_index = Add<HLoadNamedField>(
          receiver_map, nullptr,
          HObjectAccess::ForMapInObjectPropertiesOrConstructorFunctionIndex());

      // Check if {receiver} has a constructor (null and undefined have no
      // constructors, so we deoptimize to the runtime to throw an exception).
      IfBuilder constructor_function_index_is_invalid(this);
      constructor_function_index_is_invalid.If<HCompareNumericAndBranch>(
          constructor_function_index,
          Add<HConstant>(Map::kNoConstructorFunctionIndex), Token::EQ);
      constructor_function_index_is_invalid.ThenDeopt(
          DeoptimizeReason::kUndefinedOrNullInToObject);
      constructor_function_index_is_invalid.End();

      // Use the global constructor function.
      Push(constructor_function_index);
    }
    receiver_is_not_spec_object.JoinContinuation(&wrap);
  }
  receiver_is_smi.JoinContinuation(&wrap);

  // Wrap the receiver if necessary.
  IfBuilder if_wrap(this, &wrap);
  if_wrap.Then();
  {
    // Grab the constructor function index.
    HValue* constructor_index = Pop();

    // Load native context.
    HValue* native_context = BuildGetNativeContext();

    // Determine the initial map for the global constructor.
    HValue* constructor = Add<HLoadKeyed>(native_context, constructor_index,
                                          nullptr, nullptr, FAST_ELEMENTS);
    HValue* constructor_initial_map = Add<HLoadNamedField>(
        constructor, nullptr, HObjectAccess::ForPrototypeOrInitialMap());
    // Allocate and initialize a JSValue wrapper.
    HValue* value =
        BuildAllocate(Add<HConstant>(JSValue::kSize), HType::JSObject(),
                      JS_VALUE_TYPE, HAllocationMode());
    Add<HStoreNamedField>(value, HObjectAccess::ForMap(),
                          constructor_initial_map);
    HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
    Add<HStoreNamedField>(value, HObjectAccess::ForPropertiesPointer(),
                          empty_fixed_array);
    Add<HStoreNamedField>(value, HObjectAccess::ForElementsPointer(),
                          empty_fixed_array);
    Add<HStoreNamedField>(value, HObjectAccess::ForObservableJSObjectOffset(
                                     JSValue::kValueOffset),
                          receiver);
    Push(value);
  }
  if_wrap.Else();
  { Push(receiver); }
  if_wrap.End();
  return Pop();
}
   2087 
   2088 
   2089 HAllocate* HGraphBuilder::BuildAllocate(
   2090     HValue* object_size,
   2091     HType type,
   2092     InstanceType instance_type,
   2093     HAllocationMode allocation_mode) {
   2094   // Compute the effective allocation size.
   2095   HValue* size = object_size;
   2096   if (allocation_mode.CreateAllocationMementos()) {
   2097     size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
   2098     size->ClearFlag(HValue::kCanOverflow);
   2099   }
   2100 
   2101   // Perform the actual allocation.
   2102   HAllocate* object = Add<HAllocate>(
   2103       size, type, allocation_mode.GetPretenureMode(), instance_type,
   2104       graph()->GetConstant0(), allocation_mode.feedback_site());
   2105 
   2106   // Setup the allocation memento.
   2107   if (allocation_mode.CreateAllocationMementos()) {
   2108     BuildCreateAllocationMemento(
   2109         object, object_size, allocation_mode.current_site());
   2110   }
   2111 
   2112   return object;
   2113 }
   2114 
   2115 
   2116 HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
   2117                                              HValue* right_length) {
   2118   // Compute the combined string length and check against max string length.
   2119   HValue* length = AddUncasted<HAdd>(left_length, right_length);
   2120   // Check that length <= kMaxLength <=> length < MaxLength + 1.
   2121   HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
   2122   if (top_info()->IsStub() || !isolate()->IsStringLengthOverflowIntact()) {
   2123     // This is a mitigation for crbug.com/627934; the real fix
   2124     // will be to migrate the StringAddStub to TurboFan one day.
   2125     IfBuilder if_invalid(this);
   2126     if_invalid.If<HCompareNumericAndBranch>(length, max_length, Token::GT);
   2127     if_invalid.Then();
   2128     {
   2129       Add<HCallRuntime>(
   2130           Runtime::FunctionForId(Runtime::kThrowInvalidStringLength), 0);
   2131     }
   2132     if_invalid.End();
   2133   } else {
   2134     graph()->MarkDependsOnStringLengthOverflow();
   2135     Add<HBoundsCheck>(length, max_length);
   2136   }
   2137   return length;
   2138 }
   2139 
   2140 
// Allocates a ConsString of |length| characters referencing |left| and
// |right|, selecting the one-byte or two-byte cons-string map from the
// operands' instance types and initializing all string fields.
HValue* HGraphBuilder::BuildCreateConsString(
    HValue* length,
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string instance types.
  HInstruction* left_instance_type = AddLoadStringInstanceType(left);
  HInstruction* right_instance_type = AddLoadStringInstanceType(right);

  // Allocate the cons string object. HAllocate does not care whether we
  // pass CONS_STRING_TYPE or CONS_ONE_BYTE_STRING_TYPE here, so we just use
  // CONS_STRING_TYPE here. Below we decide whether the cons string is
  // one-byte or two-byte and set the appropriate map.
  DCHECK(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
                                            CONS_ONE_BYTE_STRING_TYPE));
  HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
                                    HType::String(), CONS_STRING_TYPE,
                                    allocation_mode);

  // Compute intersection and difference of instance types.
  HValue* anded_instance_types = AddUncasted<HBitwise>(
      Token::BIT_AND, left_instance_type, right_instance_type);
  HValue* xored_instance_types = AddUncasted<HBitwise>(
      Token::BIT_XOR, left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
  IfBuilder if_onebyte(this);
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kOneByteDataHintMask != 0);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, anded_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kStringEncodingMask | kOneByteDataHintMask))),
      graph()->GetConstant0(), Token::NE);
  if_onebyte.Or();
  STATIC_ASSERT(kOneByteStringTag != 0 &&
                kOneByteDataHintTag != 0 &&
                kOneByteDataHintTag != kOneByteStringTag);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, xored_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kOneByteStringTag | kOneByteDataHintTag))),
      Add<HConstant>(static_cast<int32_t>(
              kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
  if_onebyte.Then();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_one_byte_string_map()));
  }
  if_onebyte.Else();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_string_map()));
  }
  if_onebyte.End();

  // Initialize the cons string fields.
  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                        Add<HConstant>(String::kEmptyHashField));
  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);

  // Count the native string addition.
  AddIncrementCounter(isolate()->counters()->string_add_native());

  return result;
}
   2223 
   2224 
   2225 void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
   2226                                             HValue* src_offset,
   2227                                             String::Encoding src_encoding,
   2228                                             HValue* dst,
   2229                                             HValue* dst_offset,
   2230                                             String::Encoding dst_encoding,
   2231                                             HValue* length) {
   2232   DCHECK(dst_encoding != String::ONE_BYTE_ENCODING ||
   2233          src_encoding == String::ONE_BYTE_ENCODING);
   2234   LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
   2235   HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
   2236   {
   2237     HValue* src_index = AddUncasted<HAdd>(src_offset, index);
   2238     HValue* value =
   2239         AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
   2240     HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
   2241     Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
   2242   }
   2243   loop.EndBody();
   2244 }
   2245 
   2246 
   2247 HValue* HGraphBuilder::BuildObjectSizeAlignment(
   2248     HValue* unaligned_size, int header_size) {
   2249   DCHECK((header_size & kObjectAlignmentMask) == 0);
   2250   HValue* size = AddUncasted<HAdd>(
   2251       unaligned_size, Add<HConstant>(static_cast<int32_t>(
   2252           header_size + kObjectAlignmentMask)));
   2253   size->ClearFlag(HValue::kCanOverflow);
   2254   return AddUncasted<HBitwise>(
   2255       Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
   2256           ~kObjectAlignmentMask)));
   2257 }
   2258 
   2259 
// Concatenates two strings that are both known to be non-empty (the
// empty-operand cases are short-circuited in BuildStringAdd). Chooses
// between three strategies:
//   1. a ConsString when the combined length reaches
//      ConsString::kMinLength,
//   2. an inline sequential-string allocation + character copy when
//      both inputs are sequential with the same encoding and the
//      result fits in a regular-sized heap object,
//   3. a fallback call to the %StringAdd runtime function otherwise.
// The result of the selected branch is passed through the environment
// stack (Push inside the branches, single Pop at the end).
HValue* HGraphBuilder::BuildUncheckedStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Compute the combined string length.
  HValue* length = BuildAddStringLengths(left_length, right_length);

  // Do some manual constant folding here: if one side's length is a
  // constant and the other side has at least one character (guaranteed,
  // see DCHECKs below), we may already know statically that the result
  // reaches ConsString::kMinLength, so the dynamic length check can be
  // skipped entirely.
  if (left_length->IsConstant()) {
    HConstant* c_left_length = HConstant::cast(left_length);
    DCHECK_NE(0, c_left_length->Integer32Value());
    if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The right string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  } else if (right_length->IsConstant()) {
    HConstant* c_right_length = HConstant::cast(right_length);
    DCHECK_NE(0, c_right_length->Integer32Value());
    if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The left string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  }

  // Check if we should create a cons string.
  IfBuilder if_createcons(this);
  if_createcons.If<HCompareNumericAndBranch>(
      length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
  if_createcons.Then();
  {
    // Create a cons string.
    Push(BuildCreateConsString(length, left, right, allocation_mode));
  }
  if_createcons.Else();
  {
    // Determine the string instance types.
    HValue* left_instance_type = AddLoadStringInstanceType(left);
    HValue* right_instance_type = AddLoadStringInstanceType(right);

    // Compute union and difference of instance types.
    HValue* ored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_OR, left_instance_type, right_instance_type);
    HValue* xored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_XOR, left_instance_type, right_instance_type);

    // Check if both strings have the same encoding (XOR of the encoding
    // bits is zero) and both are sequential (OR of the representation
    // bits is zero, since kSeqStringTag == 0).
    IfBuilder if_sameencodingandsequential(this);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, xored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.And();
    STATIC_ASSERT(kSeqStringTag == 0);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, ored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.Then();
    {
      HConstant* string_map =
          Add<HConstant>(isolate()->factory()->string_map());
      HConstant* one_byte_string_map =
          Add<HConstant>(isolate()->factory()->one_byte_string_map());

      // Determine map and size depending on whether result is one-byte string.
      // Both branches push (size, map) so the subsequent Pops stay balanced.
      IfBuilder if_onebyte(this);
      STATIC_ASSERT(kOneByteStringTag != 0);
      if_onebyte.If<HCompareNumericAndBranch>(
          AddUncasted<HBitwise>(
              Token::BIT_AND, ored_instance_types,
              Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
          graph()->GetConstant0(), Token::NE);
      if_onebyte.Then();
      {
        // Allocate sequential one-byte string object.
        Push(length);
        Push(one_byte_string_map);
      }
      if_onebyte.Else();
      {
        // Allocate sequential two-byte string object.
        // Two bytes per character, hence length << 1.
        HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
        size->ClearFlag(HValue::kCanOverflow);
        size->SetFlag(HValue::kUint32);
        Push(size);
        Push(string_map);
      }
      if_onebyte.End();
      HValue* map = Pop();

      // Calculate the number of bytes needed for the characters in the
      // string while observing object alignment.
      STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
      HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);

      // Inline allocation only works below the regular-object size
      // limit; larger results must go through the runtime (LO space).
      IfBuilder if_size(this);
      if_size.If<HCompareNumericAndBranch>(
          size, Add<HConstant>(kMaxRegularHeapObjectSize), Token::LT);
      if_size.Then();
      {
        // Allocate the string object. HAllocate does not care whether we pass
        // STRING_TYPE or ONE_BYTE_STRING_TYPE here, so we just use STRING_TYPE.
        HAllocate* result =
            BuildAllocate(size, HType::String(), STRING_TYPE, allocation_mode);
        Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);

        // Initialize the string fields.
        Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                              Add<HConstant>(String::kEmptyHashField));
        Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);

        // Copy characters to the result string: left goes to offset 0,
        // right starts at offset left_length.
        IfBuilder if_twobyte(this);
        if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
        if_twobyte.Then();
        {
          // Copy characters from the left string.
          BuildCopySeqStringChars(
              left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
              graph()->GetConstant0(), String::TWO_BYTE_ENCODING, left_length);

          // Copy characters from the right string.
          BuildCopySeqStringChars(
              right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
              left_length, String::TWO_BYTE_ENCODING, right_length);
        }
        if_twobyte.Else();
        {
          // Copy characters from the left string.
          BuildCopySeqStringChars(
              left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
              graph()->GetConstant0(), String::ONE_BYTE_ENCODING, left_length);

          // Copy characters from the right string.
          BuildCopySeqStringChars(
              right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
              left_length, String::ONE_BYTE_ENCODING, right_length);
        }
        if_twobyte.End();

        // Count the native string addition.
        AddIncrementCounter(isolate()->counters()->string_add_native());

        // Return the sequential string.
        Push(result);
      }
      if_size.Else();
      {
        // Fallback to the runtime to add the two strings. The string has to be
        // allocated in LO space.
        Add<HPushArguments>(left, right);
        Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
      }
      if_size.End();
    }
    if_sameencodingandsequential.Else();
    {
      // Fallback to the runtime to add the two strings.
      Add<HPushArguments>(left, right);
      Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
    }
    if_sameencodingandsequential.End();
  }
  if_createcons.End();

  return Pop();
}
   2434 
   2435 
   2436 HValue* HGraphBuilder::BuildStringAdd(
   2437     HValue* left,
   2438     HValue* right,
   2439     HAllocationMode allocation_mode) {
   2440   NoObservableSideEffectsScope no_effects(this);
   2441 
   2442   // Determine string lengths.
   2443   HValue* left_length = AddLoadStringLength(left);
   2444   HValue* right_length = AddLoadStringLength(right);
   2445 
   2446   // Check if left string is empty.
   2447   IfBuilder if_leftempty(this);
   2448   if_leftempty.If<HCompareNumericAndBranch>(
   2449       left_length, graph()->GetConstant0(), Token::EQ);
   2450   if_leftempty.Then();
   2451   {
   2452     // Count the native string addition.
   2453     AddIncrementCounter(isolate()->counters()->string_add_native());
   2454 
   2455     // Just return the right string.
   2456     Push(right);
   2457   }
   2458   if_leftempty.Else();
   2459   {
   2460     // Check if right string is empty.
   2461     IfBuilder if_rightempty(this);
   2462     if_rightempty.If<HCompareNumericAndBranch>(
   2463         right_length, graph()->GetConstant0(), Token::EQ);
   2464     if_rightempty.Then();
   2465     {
   2466       // Count the native string addition.
   2467       AddIncrementCounter(isolate()->counters()->string_add_native());
   2468 
   2469       // Just return the left string.
   2470       Push(left);
   2471     }
   2472     if_rightempty.Else();
   2473     {
   2474       // Add the two non-empty strings.
   2475       Push(BuildUncheckedStringAdd(left, right, allocation_mode));
   2476     }
   2477     if_rightempty.End();
   2478   }
   2479   if_leftempty.End();
   2480 
   2481   return Pop();
   2482 }
   2483 
   2484 
// Emits a keyed element load or store on |checked_object|, whose map is
// already known (monomorphic access). Handles fixed typed arrays, fast
// smi/object/double arrays, copy-on-write backing stores, and the
// grow-on-store modes. Returns the emitted access instruction.
// NOTE(review): |val| must be non-null exactly when access_type == STORE
// (enforced downstream in AddElementAccess).
HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
    HValue* checked_object,
    HValue* key,
    HValue* val,
    bool is_js_array,
    ElementsKind elements_kind,
    PropertyAccessType access_type,
    LoadKeyedHoleMode load_mode,
    KeyedAccessStoreMode store_mode) {
  DCHECK(top_info()->IsStub() || checked_object->IsCompareMap() ||
         checked_object->IsCheckMaps());
  // Typed arrays are never JSArrays.
  DCHECK(!IsFixedTypedArrayElementsKind(elements_kind) || !is_js_array);
  // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
  // on a HElementsTransition instruction. The flag can also be removed if the
  // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
  // ElementsKind transitions. Finally, the dependency can be removed for stores
  // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
  // generated store code.
  if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
      (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
    checked_object->ClearDependsOnFlag(kElementsKind);
  }

  bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
  bool fast_elements = IsFastObjectElementsKind(elements_kind);
  HValue* elements = AddLoadElements(checked_object);
  // Stores into fast arrays must not hit a copy-on-write backing store
  // (unless the store mode explicitly handles COW); guard with a map
  // check against the plain fixed-array map.
  if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
    HCheckMaps* check_cow_map = Add<HCheckMaps>(
        elements, isolate()->factory()->fixed_array_map());
    check_cow_map->ClearDependsOnFlag(kElementsKind);
  }
  // JSArrays carry their length on the array itself; plain objects use
  // the backing store's length.
  HInstruction* length = NULL;
  if (is_js_array) {
    length = Add<HLoadNamedField>(
        checked_object->ActualValue(), checked_object,
        HObjectAccess::ForArrayLength(elements_kind));
  } else {
    length = AddLoadFixedArrayLength(elements);
  }
  length->set_type(HType::Smi());
  HValue* checked_key = NULL;
  if (IsFixedTypedArrayElementsKind(elements_kind)) {
    // Typed-array path: the underlying ArrayBuffer may have been
    // neutered (detached), which must deopt.
    checked_object = Add<HCheckArrayBufferNotNeutered>(checked_object);

    HValue* external_pointer = Add<HLoadNamedField>(
        elements, nullptr,
        HObjectAccess::ForFixedTypedArrayBaseExternalPointer());
    HValue* base_pointer = Add<HLoadNamedField>(
        elements, nullptr, HObjectAccess::ForFixedTypedArrayBaseBasePointer());
    HValue* backing_store = AddUncasted<HAdd>(external_pointer, base_pointer,
                                              AddOfExternalAndTagged);

    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      // Out-of-bounds typed-array stores are silently ignored (spec
      // behavior); only 0 <= key < length actually stores. Negative
      // keys deopt rather than being handled here.
      NoObservableSideEffectsScope no_effects(this);
      IfBuilder length_checker(this);
      length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
      length_checker.Then();
      IfBuilder negative_checker(this);
      HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
          key, graph()->GetConstant0(), Token::GTE);
      negative_checker.Then();
      HInstruction* result = AddElementAccess(
          backing_store, key, val, bounds_check, checked_object->ActualValue(),
          elements_kind, access_type);
      negative_checker.ElseDeopt(DeoptimizeReason::kNegativeKeyEncountered);
      negative_checker.End();
      length_checker.End();
      return result;
    } else {
      DCHECK(store_mode == STANDARD_STORE);
      checked_key = Add<HBoundsCheck>(key, length);
      return AddElementAccess(backing_store, checked_key, val, checked_object,
                              checked_object->ActualValue(), elements_kind,
                              access_type);
    }
  }
  DCHECK(fast_smi_only_elements ||
         fast_elements ||
         IsFastDoubleElementsKind(elements_kind));

  // In case val is stored into a fast smi array, assure that the value is a smi
  // before manipulating the backing store. Otherwise the actual store may
  // deopt, leaving the backing store in an invalid state.
  if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
      !val->type().IsSmi()) {
    val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
  }

  if (IsGrowStoreMode(store_mode)) {
    // Growing store: the backing store may need to be enlarged before
    // the store; key has already been validated by the grow logic, so
    // no extra bounds check is emitted.
    NoObservableSideEffectsScope no_effects(this);
    Representation representation = HStoreKeyed::RequiredValueRepresentation(
        elements_kind, STORE_TO_INITIALIZED_ENTRY);
    val = AddUncasted<HForceRepresentation>(val, representation);
    elements = BuildCheckForCapacityGrow(checked_object, elements,
                                         elements_kind, length, key,
                                         is_js_array, access_type);
    checked_key = key;
  } else {
    checked_key = Add<HBoundsCheck>(key, length);

    if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
      if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
        // COW-aware store: copy the backing store before writing.
        NoObservableSideEffectsScope no_effects(this);
        elements = BuildCopyElementsOnWrite(checked_object, elements,
                                            elements_kind, length);
      } else {
        HCheckMaps* check_cow_map = Add<HCheckMaps>(
            elements, isolate()->factory()->fixed_array_map());
        check_cow_map->ClearDependsOnFlag(kElementsKind);
      }
    }
  }
  return AddElementAccess(elements, checked_key, val, checked_object, nullptr,
                          elements_kind, access_type, load_mode);
}
   2601 
   2602 
   2603 HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind,
   2604                                                   HValue* capacity) {
   2605   int elements_size = IsFastDoubleElementsKind(kind)
   2606       ? kDoubleSize
   2607       : kPointerSize;
   2608 
   2609   HConstant* elements_size_value = Add<HConstant>(elements_size);
   2610   HInstruction* mul =
   2611       HMul::NewImul(isolate(), zone(), context(), capacity->ActualValue(),
   2612                     elements_size_value);
   2613   AddInstruction(mul);
   2614   mul->ClearFlag(HValue::kCanOverflow);
   2615 
   2616   STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
   2617 
   2618   HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
   2619   HValue* total_size = AddUncasted<HAdd>(mul, header_size);
   2620   total_size->ClearFlag(HValue::kCanOverflow);
   2621   return total_size;
   2622 }
   2623 
   2624 
   2625 HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) {
   2626   int base_size = JSArray::kSize;
   2627   if (mode == TRACK_ALLOCATION_SITE) {
   2628     base_size += AllocationMemento::kSize;
   2629   }
   2630   HConstant* size_in_bytes = Add<HConstant>(base_size);
   2631   return Add<HAllocate>(size_in_bytes, HType::JSArray(), NOT_TENURED,
   2632                         JS_OBJECT_TYPE, graph()->GetConstant0());
   2633 }
   2634 
   2635 
   2636 HConstant* HGraphBuilder::EstablishElementsAllocationSize(
   2637     ElementsKind kind,
   2638     int capacity) {
   2639   int base_size = IsFastDoubleElementsKind(kind)
   2640       ? FixedDoubleArray::SizeFor(capacity)
   2641       : FixedArray::SizeFor(capacity);
   2642 
   2643   return Add<HConstant>(base_size);
   2644 }
   2645 
   2646 
   2647 HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
   2648                                                 HValue* size_in_bytes) {
   2649   InstanceType instance_type = IsFastDoubleElementsKind(kind)
   2650       ? FIXED_DOUBLE_ARRAY_TYPE
   2651       : FIXED_ARRAY_TYPE;
   2652 
   2653   return Add<HAllocate>(size_in_bytes, HType::HeapObject(), NOT_TENURED,
   2654                         instance_type, graph()->GetConstant0());
   2655 }
   2656 
   2657 
   2658 void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
   2659                                                   ElementsKind kind,
   2660                                                   HValue* capacity) {
   2661   Factory* factory = isolate()->factory();
   2662   Handle<Map> map = IsFastDoubleElementsKind(kind)
   2663       ? factory->fixed_double_array_map()
   2664       : factory->fixed_array_map();
   2665 
   2666   Add<HStoreNamedField>(elements, HObjectAccess::ForMap(), Add<HConstant>(map));
   2667   Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
   2668                         capacity);
   2669 }
   2670 
   2671 
   2672 HValue* HGraphBuilder::BuildAllocateAndInitializeArray(ElementsKind kind,
   2673                                                        HValue* capacity) {
   2674   // The HForceRepresentation is to prevent possible deopt on int-smi
   2675   // conversion after allocation but before the new object fields are set.
   2676   capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
   2677   HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity);
   2678   HValue* new_array = BuildAllocateElements(kind, size_in_bytes);
   2679   BuildInitializeElementsHeader(new_array, kind, capacity);
   2680   return new_array;
   2681 }
   2682 
   2683 
   2684 void HGraphBuilder::BuildJSArrayHeader(HValue* array,
   2685                                        HValue* array_map,
   2686                                        HValue* elements,
   2687                                        AllocationSiteMode mode,
   2688                                        ElementsKind elements_kind,
   2689                                        HValue* allocation_site_payload,
   2690                                        HValue* length_field) {
   2691   Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);
   2692 
   2693   HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
   2694 
   2695   Add<HStoreNamedField>(
   2696       array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array);
   2697 
   2698   Add<HStoreNamedField>(array, HObjectAccess::ForElementsPointer(),
   2699                         elements != nullptr ? elements : empty_fixed_array);
   2700 
   2701   Add<HStoreNamedField>(
   2702       array, HObjectAccess::ForArrayLength(elements_kind), length_field);
   2703 
   2704   if (mode == TRACK_ALLOCATION_SITE) {
   2705     BuildCreateAllocationMemento(
   2706         array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
   2707   }
   2708 }
   2709 
   2710 
   2711 HInstruction* HGraphBuilder::AddElementAccess(
   2712     HValue* elements, HValue* checked_key, HValue* val, HValue* dependency,
   2713     HValue* backing_store_owner, ElementsKind elements_kind,
   2714     PropertyAccessType access_type, LoadKeyedHoleMode load_mode) {
   2715   if (access_type == STORE) {
   2716     DCHECK(val != NULL);
   2717     if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
   2718       val = Add<HClampToUint8>(val);
   2719     }
   2720     return Add<HStoreKeyed>(elements, checked_key, val, backing_store_owner,
   2721                             elements_kind, STORE_TO_INITIALIZED_ENTRY);
   2722   }
   2723 
   2724   DCHECK(access_type == LOAD);
   2725   DCHECK(val == NULL);
   2726   HLoadKeyed* load =
   2727       Add<HLoadKeyed>(elements, checked_key, dependency, backing_store_owner,
   2728                       elements_kind, load_mode);
   2729   if (elements_kind == UINT32_ELEMENTS) {
   2730     graph()->RecordUint32Instruction(load);
   2731   }
   2732   return load;
   2733 }
   2734 
   2735 
   2736 HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object,
   2737                                            HValue* dependency) {
   2738   return Add<HLoadNamedField>(object, dependency, HObjectAccess::ForMap());
   2739 }
   2740 
   2741 
   2742 HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
   2743                                                 HValue* dependency) {
   2744   return Add<HLoadNamedField>(
   2745       object, dependency, HObjectAccess::ForElementsPointer());
   2746 }
   2747 
   2748 
   2749 HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
   2750     HValue* array,
   2751     HValue* dependency) {
   2752   return Add<HLoadNamedField>(
   2753       array, dependency, HObjectAccess::ForFixedArrayLength());
   2754 }
   2755 
   2756 
   2757 HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
   2758                                                    ElementsKind kind,
   2759                                                    HValue* dependency) {
   2760   return Add<HLoadNamedField>(
   2761       array, dependency, HObjectAccess::ForArrayLength(kind));
   2762 }
   2763 
   2764 
   2765 HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
   2766   HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
   2767                                                 graph_->GetConstant1());
   2768 
   2769   HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
   2770   new_capacity->ClearFlag(HValue::kCanOverflow);
   2771 
   2772   HValue* min_growth = Add<HConstant>(16);
   2773 
   2774   new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
   2775   new_capacity->ClearFlag(HValue::kCanOverflow);
   2776 
   2777   return new_capacity;
   2778 }
   2779 
   2780 
   2781 HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
   2782                                                  HValue* elements,
   2783                                                  ElementsKind kind,
   2784                                                  ElementsKind new_kind,
   2785                                                  HValue* length,
   2786                                                  HValue* new_capacity) {
   2787   Add<HBoundsCheck>(
   2788       new_capacity,
   2789       Add<HConstant>((kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
   2790                      ElementsKindToShiftSize(new_kind)));
   2791 
   2792   HValue* new_elements =
   2793       BuildAllocateAndInitializeArray(new_kind, new_capacity);
   2794 
   2795   BuildCopyElements(elements, kind, new_elements,
   2796                     new_kind, length, new_capacity);
   2797 
   2798   Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
   2799                         new_elements);
   2800 
   2801   return new_elements;
   2802 }
   2803 
   2804 
   2805 void HGraphBuilder::BuildFillElementsWithValue(HValue* elements,
   2806                                                ElementsKind elements_kind,
   2807                                                HValue* from,
   2808                                                HValue* to,
   2809                                                HValue* value) {
   2810   if (to == NULL) {
   2811     to = AddLoadFixedArrayLength(elements);
   2812   }
   2813 
   2814   // Special loop unfolding case
   2815   STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
   2816                 kElementLoopUnrollThreshold);
   2817   int initial_capacity = -1;
   2818   if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
   2819     int constant_from = from->GetInteger32Constant();
   2820     int constant_to = to->GetInteger32Constant();
   2821 
   2822     if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
   2823       initial_capacity = constant_to;
   2824     }
   2825   }
   2826 
   2827   if (initial_capacity >= 0) {
   2828     for (int i = 0; i < initial_capacity; i++) {
   2829       HInstruction* key = Add<HConstant>(i);
   2830       Add<HStoreKeyed>(elements, key, value, nullptr, elements_kind);
   2831     }
   2832   } else {
   2833     // Carefully loop backwards so that the "from" remains live through the loop
   2834     // rather than the to. This often corresponds to keeping length live rather
   2835     // then capacity, which helps register allocation, since length is used more
   2836     // other than capacity after filling with holes.
   2837     LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
   2838 
   2839     HValue* key = builder.BeginBody(to, from, Token::GT);
   2840 
   2841     HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
   2842     adjusted_key->ClearFlag(HValue::kCanOverflow);
   2843 
   2844     Add<HStoreKeyed>(elements, adjusted_key, value, nullptr, elements_kind);
   2845 
   2846     builder.EndBody();
   2847   }
   2848 }
   2849 
   2850 
   2851 void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
   2852                                               ElementsKind elements_kind,
   2853                                               HValue* from,
   2854                                               HValue* to) {
   2855   // Fast elements kinds need to be initialized in case statements below cause a
   2856   // garbage collection.
   2857 
   2858   HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
   2859                      ? graph()->GetConstantHole()
   2860                      : Add<HConstant>(HConstant::kHoleNaN);
   2861 
   2862   // Since we're about to store a hole value, the store instruction below must
   2863   // assume an elements kind that supports heap object values.
   2864   if (IsFastSmiOrObjectElementsKind(elements_kind)) {
   2865     elements_kind = FAST_HOLEY_ELEMENTS;
   2866   }
   2867 
   2868   BuildFillElementsWithValue(elements, elements_kind, from, to, hole);
   2869 }
   2870 
   2871 
   2872 void HGraphBuilder::BuildCopyProperties(HValue* from_properties,
   2873                                         HValue* to_properties, HValue* length,
   2874                                         HValue* capacity) {
   2875   ElementsKind kind = FAST_ELEMENTS;
   2876 
   2877   BuildFillElementsWithValue(to_properties, kind, length, capacity,
   2878                              graph()->GetConstantUndefined());
   2879 
   2880   LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
   2881 
   2882   HValue* key = builder.BeginBody(length, graph()->GetConstant0(), Token::GT);
   2883 
   2884   key = AddUncasted<HSub>(key, graph()->GetConstant1());
   2885   key->ClearFlag(HValue::kCanOverflow);
   2886 
   2887   HValue* element =
   2888       Add<HLoadKeyed>(from_properties, key, nullptr, nullptr, kind);
   2889 
   2890   Add<HStoreKeyed>(to_properties, key, element, nullptr, kind);
   2891 
   2892   builder.EndBody();
   2893 }
   2894 
   2895 
// Copies |length| elements from |from_elements| to |to_elements|,
// converting between the two elements kinds where necessary.  For small
// compile-time-constant capacities the copy is fully unrolled; otherwise
// a backwards-running copy loop is emitted.
void HGraphBuilder::BuildCopyElements(HValue* from_elements,
                                      ElementsKind from_elements_kind,
                                      HValue* to_elements,
                                      ElementsKind to_elements_kind,
                                      HValue* length,
                                      HValue* capacity) {
  // Determine whether |capacity| is a constant small enough to allow
  // unrolling the copy loop (-1 means "not unrollable").
  int constant_capacity = -1;
  if (capacity != NULL &&
      capacity->IsConstant() &&
      HConstant::cast(capacity)->HasInteger32Value()) {
    int constant_candidate = HConstant::cast(capacity)->Integer32Value();
    if (constant_candidate <= kElementLoopUnrollThreshold) {
      constant_capacity = constant_candidate;
    }
  }

  bool pre_fill_with_holes =
    IsFastDoubleElementsKind(from_elements_kind) &&
    IsFastObjectElementsKind(to_elements_kind);
  if (pre_fill_with_holes) {
    // If the copy might trigger a GC, make sure that the FixedArray is
    // pre-initialized with holes to make sure that it's always in a
    // consistent state.
    BuildFillElementsWithHole(to_elements, to_elements_kind,
                              graph()->GetConstant0(), NULL);
  }

  if (constant_capacity != -1) {
    // Unroll the loop for small elements kinds.
    for (int i = 0; i < constant_capacity; i++) {
      HValue* key_constant = Add<HConstant>(i);
      HInstruction* value = Add<HLoadKeyed>(
          from_elements, key_constant, nullptr, nullptr, from_elements_kind);
      Add<HStoreKeyed>(to_elements, key_constant, value, nullptr,
                       to_elements_kind);
    }
  } else {
    // If the destination was not pre-holed above and the copy does not
    // cover the whole capacity, hole-initialize the uncopied tail.
    if (!pre_fill_with_holes &&
        (capacity == NULL || !length->Equals(capacity))) {
      BuildFillElementsWithHole(to_elements, to_elements_kind,
                                length, NULL);
    }

    // Copy from index length-1 down to 0.
    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
                                    Token::GT);

    key = AddUncasted<HSub>(key, graph()->GetConstant1());
    // key is in (0, length], so the decrement cannot overflow.
    key->ClearFlag(HValue::kCanOverflow);

    HValue* element = Add<HLoadKeyed>(from_elements, key, nullptr, nullptr,
                                      from_elements_kind, ALLOW_RETURN_HOLE);

    // Stores of holey values into a smi array must use a kind that can
    // represent the hole.
    ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
                         IsFastSmiElementsKind(to_elements_kind))
      ? FAST_HOLEY_ELEMENTS : to_elements_kind;

    if (IsHoleyElementsKind(from_elements_kind) &&
        from_elements_kind != to_elements_kind) {
      // The source may contain holes that must be translated into the
      // destination kind's hole representation.
      IfBuilder if_hole(this);
      if_hole.If<HCompareHoleAndBranch>(element);
      if_hole.Then();
      HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
                                     ? Add<HConstant>(HConstant::kHoleNaN)
                                     : graph()->GetConstantHole();
      Add<HStoreKeyed>(to_elements, key, hole_constant, nullptr, kind);
      if_hole.Else();
      HStoreKeyed* store =
          Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
      store->SetFlag(HValue::kTruncatingToNumber);
      if_hole.End();
    } else {
      HStoreKeyed* store =
          Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
      store->SetFlag(HValue::kTruncatingToNumber);
    }

    builder.EndBody();
  }

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->inlined_copied_elements());
}
   2980 
// Emits code that writes an AllocationMemento immediately behind
// |previous_object| (whose size is |previous_object_size|), pointing at
// |allocation_site|.  When allocation-site pretenuring is enabled, also
// bumps the site's memento creation counter.
void HGraphBuilder::BuildCreateAllocationMemento(
    HValue* previous_object,
    HValue* previous_object_size,
    HValue* allocation_site) {
  DCHECK(allocation_site != NULL);
  // The memento is carved out of the same allocation, right after the
  // previously allocated object.
  HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
      previous_object, previous_object_size, HType::HeapObject());
  AddStoreMapConstant(
      allocation_memento, isolate()->factory()->allocation_memento_map());
  Add<HStoreNamedField>(
      allocation_memento,
      HObjectAccess::ForAllocationMementoSite(),
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    HValue* memento_create_count =
        Add<HLoadNamedField>(allocation_site, nullptr,
                             HObjectAccess::ForAllocationSiteOffset(
                                 AllocationSite::kPretenureCreateCountOffset));
    memento_create_count = AddUncasted<HAdd>(
        memento_create_count, graph()->GetConstant1());
    // This smi value is reset to zero after every gc, overflow isn't a problem
    // since the counter is bounded by the new space size.
    memento_create_count->ClearFlag(HValue::kCanOverflow);
    Add<HStoreNamedField>(
        allocation_site, HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kPretenureCreateCountOffset), memento_create_count);
  }
}
   3009 
   3010 
   3011 HInstruction* HGraphBuilder::BuildGetNativeContext() {
   3012   return Add<HLoadNamedField>(
   3013       context(), nullptr,
   3014       HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
   3015 }
   3016 
   3017 
   3018 HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
   3019   // Get the global object, then the native context
   3020   HInstruction* context = Add<HLoadNamedField>(
   3021       closure, nullptr, HObjectAccess::ForFunctionContextPointer());
   3022   return Add<HLoadNamedField>(
   3023       context, nullptr,
   3024       HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
   3025 }
   3026 
   3027 
// Walks up the context chain from the current context.  If |depth| is
// non-NULL the walk is emitted as a loop over that dynamic count;
// otherwise the walk for the static |depth_value| is unrolled (zero
// iterations simply returns the current context).
HValue* HGraphBuilder::BuildGetParentContext(HValue* depth, int depth_value) {
  HValue* script_context = context();
  if (depth != NULL) {
    HValue* zero = graph()->GetConstant0();

    // The loop carries (context, remaining depth) on the environment.
    Push(script_context);
    Push(depth);

    LoopBuilder loop(this);
    loop.BeginBody(2);  // Drop script_context and depth from last environment
                        // to appease live range building without simulates.
    depth = Pop();
    script_context = Pop();

    script_context = Add<HLoadNamedField>(
        script_context, nullptr,
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
    depth = AddUncasted<HSub>(depth, graph()->GetConstant1());
    // Exits as soon as depth hits zero, so no overflow is possible.
    depth->ClearFlag(HValue::kCanOverflow);

    IfBuilder if_break(this);
    if_break.If<HCompareNumericAndBranch, HValue*>(depth, zero, Token::EQ);
    if_break.Then();
    {
      Push(script_context);  // The result.
      loop.Break();
    }
    if_break.Else();
    {
      Push(script_context);
      Push(depth);
    }
    loop.EndBody();
    if_break.End();

    script_context = Pop();
  } else if (depth_value > 0) {
    // Unroll the above loop.
    for (int i = 0; i < depth_value; i++) {
      script_context = Add<HLoadNamedField>(
          script_context, nullptr,
          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
    }
  }
  return script_context;
}
   3074 
   3075 
   3076 HInstruction* HGraphBuilder::BuildGetArrayFunction() {
   3077   HInstruction* native_context = BuildGetNativeContext();
   3078   HInstruction* index =
   3079       Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
   3080   return Add<HLoadKeyed>(native_context, index, nullptr, nullptr,
   3081                          FAST_ELEMENTS);
   3082 }
   3083 
   3084 
// Loads the field at |index| from a JSArrayBufferView, yielding zero
// instead of the stored value when the view's backing buffer has been
// neutered (detached).
HValue* HGraphBuilder::BuildArrayBufferViewFieldAccessor(HValue* object,
                                                         HValue* checked_object,
                                                         FieldIndex index) {
  NoObservableSideEffectsScope scope(this);
  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
      index.offset(), Representation::Tagged());
  HInstruction* buffer = Add<HLoadNamedField>(
      object, checked_object, HObjectAccess::ForJSArrayBufferViewBuffer());
  HInstruction* field = Add<HLoadNamedField>(object, checked_object, access);

  // Extract the WasNeutered bit from the buffer's bit field.
  HInstruction* flags = Add<HLoadNamedField>(
      buffer, nullptr, HObjectAccess::ForJSArrayBufferBitField());
  HValue* was_neutered_mask =
      Add<HConstant>(1 << JSArrayBuffer::WasNeutered::kShift);
  HValue* was_neutered_test =
      AddUncasted<HBitwise>(Token::BIT_AND, flags, was_neutered_mask);

  // Select zero if neutered, the loaded field otherwise.
  IfBuilder if_was_neutered(this);
  if_was_neutered.If<HCompareNumericAndBranch>(
      was_neutered_test, graph()->GetConstant0(), Token::NE);
  if_was_neutered.Then();
  Push(graph()->GetConstant0());
  if_was_neutered.Else();
  Push(field);
  if_was_neutered.End();

  return Pop();
}
   3113 
   3114 HValue* HGraphBuilder::AddLoadJSBuiltin(int context_index) {
   3115   HValue* native_context = BuildGetNativeContext();
   3116   HObjectAccess function_access = HObjectAccess::ForContextSlot(context_index);
   3117   return Add<HLoadNamedField>(native_context, nullptr, function_access);
   3118 }
   3119 
// Sets up the graph builder for optimizing |info|'s function, creating
// the initial (non-inlined) FunctionState and the OSR helper.
HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info,
                                               bool track_positions)
    : HGraphBuilder(info, CallInterfaceDescriptor(), track_positions),
      function_state_(NULL),
      initial_function_state_(this, info, NORMAL_RETURN, -1,
                              TailCallMode::kAllow),
      ast_context_(NULL),
      break_scope_(NULL),
      inlined_count_(0),
      globals_(10, info->zone()),
      osr_(new (info->zone()) HOsrBuilder(this)),
      bounds_(info->zone()) {
  // This is not initialized in the initializer list because the
  // constructor for the initial state relies on function_state_ == NULL
  // to know it's the initial state.
  function_state_ = &initial_function_state_;
  InitializeAstVisitor(info->isolate());
}
   3138 
   3139 
   3140 HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
   3141                                                 HBasicBlock* second,
   3142                                                 BailoutId join_id) {
   3143   if (first == NULL) {
   3144     return second;
   3145   } else if (second == NULL) {
   3146     return first;
   3147   } else {
   3148     HBasicBlock* join_block = graph()->CreateBasicBlock();
   3149     Goto(first, join_block);
   3150     Goto(second, join_block);
   3151     join_block->SetJoinId(join_id);
   3152     return join_block;
   3153   }
   3154 }
   3155 
   3156 HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
   3157                                                   BailoutId continue_id,
   3158                                                   HBasicBlock* exit_block,
   3159                                                   HBasicBlock* continue_block) {
   3160   if (continue_block != NULL) {
   3161     if (exit_block != NULL) Goto(exit_block, continue_block);
   3162     continue_block->SetJoinId(continue_id);
   3163     return continue_block;
   3164   }
   3165   return exit_block;
   3166 }
   3167 
   3168 
   3169 HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
   3170                                                 HBasicBlock* loop_entry,
   3171                                                 HBasicBlock* body_exit,
   3172                                                 HBasicBlock* loop_successor,
   3173                                                 HBasicBlock* break_block) {
   3174   if (body_exit != NULL) Goto(body_exit, loop_entry);
   3175   loop_entry->PostProcessLoopHeader(statement);
   3176   if (break_block != NULL) {
   3177     if (loop_successor != NULL) Goto(loop_successor, break_block);
   3178     break_block->SetJoinId(statement->ExitId());
   3179     return break_block;
   3180   }
   3181   return loop_successor;
   3182 }
   3183 
   3184 
   3185 // Build a new loop header block and set it as the current block.
   3186 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
   3187   HBasicBlock* loop_entry = CreateLoopHeaderBlock();
   3188   Goto(loop_entry);
   3189   set_current_block(loop_entry);
   3190   return loop_entry;
   3191 }
   3192 
   3193 
   3194 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
   3195     IterationStatement* statement) {
   3196   HBasicBlock* loop_entry;
   3197 
   3198   if (osr()->HasOsrEntryAt(statement)) {
   3199     loop_entry = osr()->BuildOsrLoopEntry(statement);
   3200     if (function_state()->IsInsideDoExpressionScope()) {
   3201       Bailout(kDoExpressionUnmodelable);
   3202     }
   3203   } else {
   3204     loop_entry = BuildLoopEntry();
   3205   }
   3206   return loop_entry;
   3207 }
   3208 
   3209 
// Ends the block with a graph-exiting control instruction and drops the
// environment, which is no longer needed past this point.
void HBasicBlock::FinishExit(HControlInstruction* instruction,
                             SourcePosition position) {
  Finish(instruction, position);
  ClearEnvironment();
}
   3215 
   3216 
   3217 std::ostream& operator<<(std::ostream& os, const HBasicBlock& b) {
   3218   return os << "B" << b.block_id();
   3219 }
   3220 
// Constructs an empty graph for |info|, creating the start environment
// and the entry block.  Stubs get a flat environment sized from the
// call descriptor; regular functions get one derived from the scope.
HGraph::HGraph(CompilationInfo* info, CallInterfaceDescriptor descriptor)
    : isolate_(info->isolate()),
      next_block_id_(0),
      entry_block_(NULL),
      blocks_(8, info->zone()),
      values_(16, info->zone()),
      phi_list_(NULL),
      uint32_instructions_(NULL),
      osr_(NULL),
      info_(info),
      descriptor_(descriptor),
      zone_(info->zone()),
      allow_code_motion_(false),
      use_optimistic_licm_(false),
      depends_on_empty_array_proto_elements_(false),
      depends_on_string_length_overflow_(false),
      type_change_checksum_(0),
      maximum_environment_size_(0),
      no_side_effects_scope_count_(0),
      disallow_adding_new_values_(false) {
  if (info->IsStub()) {
    // For stubs, explicitly add the context to the environment.
    start_environment_ =
        new (zone_) HEnvironment(zone_, descriptor.GetParameterCount() + 1);
  } else {
    start_environment_ =
        new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
  }
  start_environment_->set_ast_id(BailoutId::FunctionContext());
  entry_block_ = CreateBasicBlock();
  entry_block_->SetInitialEnvironment(start_environment_);
}
   3253 
   3254 
   3255 HBasicBlock* HGraph::CreateBasicBlock() {
   3256   HBasicBlock* result = new(zone()) HBasicBlock(this);
   3257   blocks_.Add(result, zone());
   3258   return result;
   3259 }
   3260 
   3261 
   3262 void HGraph::FinalizeUniqueness() {
   3263   DisallowHeapAllocation no_gc;
   3264   for (int i = 0; i < blocks()->length(); ++i) {
   3265     for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
   3266       it.Current()->FinalizeUniqueness();
   3267     }
   3268   }
   3269 }
   3270 
   3271 
   3272 // Block ordering was implemented with two mutually recursive methods,
   3273 // HGraph::Postorder and HGraph::PostorderLoopBlocks.
   3274 // The recursion could lead to stack overflow so the algorithm has been
   3275 // implemented iteratively.
   3276 // At a high level the algorithm looks like this:
   3277 //
   3278 // Postorder(block, loop_header) : {
   3279 //   if (block has already been visited or is of another loop) return;
   3280 //   mark block as visited;
   3281 //   if (block is a loop header) {
   3282 //     VisitLoopMembers(block, loop_header);
   3283 //     VisitSuccessorsOfLoopHeader(block);
   3284 //   } else {
   3285 //     VisitSuccessors(block)
   3286 //   }
   3287 //   put block in result list;
   3288 // }
   3289 //
   3290 // VisitLoopMembers(block, outer_loop_header) {
   3291 //   foreach (block b in block loop members) {
   3292 //     VisitSuccessorsOfLoopMember(b, outer_loop_header);
   3293 //     if (b is loop header) VisitLoopMembers(b);
   3294 //   }
   3295 // }
   3296 //
   3297 // VisitSuccessorsOfLoopMember(block, outer_loop_header) {
   3298 //   foreach (block b in block successors) Postorder(b, outer_loop_header)
   3299 // }
   3300 //
   3301 // VisitSuccessorsOfLoopHeader(block) {
   3302 //   foreach (block b in block successors) Postorder(b, block)
   3303 // }
   3304 //
   3305 // VisitSuccessors(block, loop_header) {
   3306 //   foreach (block b in block successors) Postorder(b, loop_header)
   3307 // }
   3308 //
// The ordering is started by calling Postorder(entry, NULL).
   3310 //
   3311 // Each instance of PostorderProcessor represents the "stack frame" of the
   3312 // recursion, and particularly keeps the state of the loop (iteration) of the
   3313 // "Visit..." function it represents.
   3314 // To recycle memory we keep all the frames in a double linked list but
   3315 // this means that we cannot use constructors to initialize the frames.
   3316 //
// Iterative engine for the postorder traversal described above.  Each
// instance is one recycled "stack frame"; frames form a doubly linked
// list via father_/child_ so no frame is ever freed, only reused.
class PostorderProcessor : public ZoneObject {
 public:
  // Back link (towards the stack bottom).
  PostorderProcessor* parent() {return father_; }
  // Forward link (towards the stack top).
  PostorderProcessor* child() {return child_; }
  HBasicBlock* block() { return block_; }
  HLoopInformation* loop() { return loop_; }
  HBasicBlock* loop_header() { return loop_header_; }

  // Creates the bottom frame and starts the traversal at |block| with
  // no enclosing loop (the Postorder(entry, NULL) call above).
  static PostorderProcessor* CreateEntryProcessor(Zone* zone,
                                                  HBasicBlock* block) {
    PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
    return result->SetupSuccessors(zone, block, NULL);
  }

  // Performs one traversal step and returns the frame to continue from,
  // or NULL when the traversal is complete.
  PostorderProcessor* PerformStep(Zone* zone,
                                  ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* next =
        PerformNonBacktrackingStep(zone, order);
    if (next != NULL) {
      return next;
    } else {
      return Backtrack(zone, order);
    }
  }

 private:
  explicit PostorderProcessor(PostorderProcessor* father)
      : father_(father), child_(NULL), successor_iterator(NULL) { }

  // Each enum value states the cycle whose state is kept by this instance.
  enum LoopKind {
    NONE,
    SUCCESSORS,
    SUCCESSORS_OF_LOOP_HEADER,
    LOOP_MEMBERS,
    SUCCESSORS_OF_LOOP_MEMBER
  };

  // Each "Setup..." method is like a constructor for a cycle state.
  PostorderProcessor* SetupSuccessors(Zone* zone,
                                      HBasicBlock* block,
                                      HBasicBlock* loop_header) {
    // Skip blocks already visited or belonging to a different loop.
    if (block == NULL || block->IsOrdered() ||
        block->parent_loop_header() != loop_header) {
      kind_ = NONE;
      block_ = NULL;
      loop_ = NULL;
      loop_header_ = NULL;
      return this;
    } else {
      block_ = block;
      loop_ = NULL;
      block->MarkAsOrdered();

      if (block->IsLoopHeader()) {
        kind_ = SUCCESSORS_OF_LOOP_HEADER;
        loop_header_ = block;
        InitializeSuccessors();
        PostorderProcessor* result = Push(zone);
        return result->SetupLoopMembers(zone, block, block->loop_information(),
                                        loop_header);
      } else {
        DCHECK(block->IsFinished());
        kind_ = SUCCESSORS;
        loop_header_ = loop_header;
        InitializeSuccessors();
        return this;
      }
    }
  }

  PostorderProcessor* SetupLoopMembers(Zone* zone,
                                       HBasicBlock* block,
                                       HLoopInformation* loop,
                                       HBasicBlock* loop_header) {
    kind_ = LOOP_MEMBERS;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeLoopMembers();
    return this;
  }

  PostorderProcessor* SetupSuccessorsOfLoopMember(
      HBasicBlock* block,
      HLoopInformation* loop,
      HBasicBlock* loop_header) {
    kind_ = SUCCESSORS_OF_LOOP_MEMBER;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeSuccessors();
    return this;
  }

  // This method "allocates" a new stack frame.
  PostorderProcessor* Push(Zone* zone) {
    if (child_ == NULL) {
      child_ = new(zone) PostorderProcessor(this);
    }
    return child_;
  }

  // Appends block_ to the order, asserting that all its non-loop-header
  // successors have already been emitted (postorder invariant).
  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
    DCHECK(block_->end()->FirstSuccessor() == NULL ||
           order->Contains(block_->end()->FirstSuccessor()) ||
           block_->end()->FirstSuccessor()->IsLoopHeader());
    DCHECK(block_->end()->SecondSuccessor() == NULL ||
           order->Contains(block_->end()->SecondSuccessor()) ||
           block_->end()->SecondSuccessor()->IsLoopHeader());
    order->Add(block_, zone);
  }

  // This method is the basic block to walk up the stack.
  PostorderProcessor* Pop(Zone* zone,
                          ZoneList<HBasicBlock*>* order) {
    switch (kind_) {
      case SUCCESSORS:
      case SUCCESSORS_OF_LOOP_HEADER:
        ClosePostorder(order, zone);
        return father_;
      case LOOP_MEMBERS:
        return father_;
      case SUCCESSORS_OF_LOOP_MEMBER:
        if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
          // In this case we need to perform a LOOP_MEMBERS cycle so we
          // initialize it and return this instead of father.
          return SetupLoopMembers(zone, block(),
                                  block()->loop_information(), loop_header_);
        } else {
          return father_;
        }
      case NONE:
        return father_;
    }
    UNREACHABLE();
    return NULL;
  }

  // Walks up the stack.
  PostorderProcessor* Backtrack(Zone* zone,
                                ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* parent = Pop(zone, order);
    while (parent != NULL) {
      PostorderProcessor* next =
          parent->PerformNonBacktrackingStep(zone, order);
      if (next != NULL) {
        return next;
      } else {
        parent = parent->Pop(zone, order);
      }
    }
    return NULL;
  }

  // Advances this frame's cycle by one element; returns the child frame
  // to descend into, or NULL when this frame's cycle is exhausted.
  PostorderProcessor* PerformNonBacktrackingStep(
      Zone* zone,
      ZoneList<HBasicBlock*>* order) {
    HBasicBlock* next_block;
    switch (kind_) {
      case SUCCESSORS:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_HEADER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, block());
        }
        break;
      case LOOP_MEMBERS:
        next_block = AdvanceLoopMembers();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessorsOfLoopMember(next_block,
                                                     loop_, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_MEMBER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
        }
        break;
      case NONE:
        return NULL;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in successors" cycle.
  void InitializeSuccessors() {
    loop_index = 0;
    loop_length = 0;
    successor_iterator = HSuccessorIterator(block_->end());
  }

  HBasicBlock* AdvanceSuccessors() {
    if (!successor_iterator.Done()) {
      HBasicBlock* result = successor_iterator.Current();
      successor_iterator.Advance();
      return result;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in loop members" cycle.
  void InitializeLoopMembers() {
    loop_index = 0;
    loop_length = loop_->blocks()->length();
  }

  HBasicBlock* AdvanceLoopMembers() {
    if (loop_index < loop_length) {
      HBasicBlock* result = loop_->blocks()->at(loop_index);
      loop_index++;
      return result;
    } else {
      return NULL;
    }
  }

  LoopKind kind_;                // Which cycle this frame currently runs.
  PostorderProcessor* father_;   // Frame below us on the stack.
  PostorderProcessor* child_;    // Cached frame above us (recycled).
  HLoopInformation* loop_;
  HBasicBlock* block_;
  HBasicBlock* loop_header_;
  int loop_index;                // Cursor into loop_'s member list.
  int loop_length;
  HSuccessorIterator successor_iterator;
};
   3556 
   3557 
// Computes the block order (reverse postorder) using the iterative
// PostorderProcessor above, then reverses the postorder list in place
// and assigns matching block IDs.
void HGraph::OrderBlocks() {
  CompilationPhase phase("H_Block ordering", info());

#ifdef DEBUG
  // Initially the blocks must not be ordered.
  for (int i = 0; i < blocks_.length(); ++i) {
    DCHECK(!blocks_[i]->IsOrdered());
  }
#endif

  // Rewind the list and let the processor re-add blocks in postorder.
  PostorderProcessor* postorder =
      PostorderProcessor::CreateEntryProcessor(zone(), blocks_[0]);
  blocks_.Rewind(0);
  while (postorder) {
    postorder = postorder->PerformStep(zone(), &blocks_);
  }

#ifdef DEBUG
  // Now all blocks must be marked as ordered.
  for (int i = 0; i < blocks_.length(); ++i) {
    DCHECK(blocks_[i]->IsOrdered());
  }
#endif

  // Reverse block list and assign block IDs.
  for (int i = 0, j = blocks_.length(); --j >= i; ++i) {
    HBasicBlock* bi = blocks_[i];
    HBasicBlock* bj = blocks_[j];
    bi->set_block_id(j);
    bj->set_block_id(i);
    blocks_[i] = bj;
    blocks_[j] = bi;
  }
}
   3592 
   3593 
   3594 void HGraph::AssignDominators() {
   3595   HPhase phase("H_Assign dominators", this);
   3596   for (int i = 0; i < blocks_.length(); ++i) {
   3597     HBasicBlock* block = blocks_[i];
   3598     if (block->IsLoopHeader()) {
   3599       // Only the first predecessor of a loop header is from outside the loop.
   3600       // All others are back edges, and thus cannot dominate the loop header.
   3601       block->AssignCommonDominator(block->predecessors()->first());
   3602       block->AssignLoopSuccessorDominators();
   3603     } else {
   3604       for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
   3605         blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
   3606       }
   3607     }
   3608   }
   3609 }
   3610 
   3611 
   3612 bool HGraph::CheckArgumentsPhiUses() {
   3613   int block_count = blocks_.length();
   3614   for (int i = 0; i < block_count; ++i) {
   3615     for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
   3616       HPhi* phi = blocks_[i]->phis()->at(j);
   3617       // We don't support phi uses of arguments for now.
   3618       if (phi->CheckFlag(HValue::kIsArguments)) return false;
   3619     }
   3620   }
   3621   return true;
   3622 }
   3623 
   3624 
   3625 bool HGraph::CheckConstPhiUses() {
   3626   int block_count = blocks_.length();
   3627   for (int i = 0; i < block_count; ++i) {
   3628     for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
   3629       HPhi* phi = blocks_[i]->phis()->at(j);
   3630       // Check for the hole value (from an uninitialized const).
   3631       for (int k = 0; k < phi->OperandCount(); k++) {
   3632         if (phi->OperandAt(k) == GetConstantHole()) return false;
   3633       }
   3634     }
   3635   }
   3636   return true;
   3637 }
   3638 
   3639 
   3640 void HGraph::CollectPhis() {
   3641   int block_count = blocks_.length();
   3642   phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
   3643   for (int i = 0; i < block_count; ++i) {
   3644     for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
   3645       HPhi* phi = blocks_[i]->phis()->at(j);
   3646       phi_list_->Add(phi, zone());
   3647     }
   3648   }
   3649 }
   3650 
   3651 
   3652 // Implementation of utility class to encapsulate the translation state for
   3653 // a (possibly inlined) function.
// Pushes a new translation state onto the owner's function-state stack.
// For inlined functions (outer_ != NULL) this also sets up the return
// target: either a TestContext pair of branch targets or a plain
// function-return block.
FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
                             CompilationInfo* info, InliningKind inlining_kind,
                             int inlining_id, TailCallMode tail_call_mode)
    : owner_(owner),
      compilation_info_(info),
      call_context_(NULL),
      inlining_kind_(inlining_kind),
      tail_call_mode_(tail_call_mode),
      function_return_(NULL),
      test_context_(NULL),
      entry_(NULL),
      arguments_object_(NULL),
      arguments_elements_(NULL),
      inlining_id_(inlining_id),
      outer_source_position_(SourcePosition::Unknown()),
      do_expression_scope_count_(0),
      outer_(owner->function_state()) {
  if (outer_ != NULL) {
    // State for an inline function.
    if (owner->ast_context()->IsTest()) {
      HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
      HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
      if_true->MarkAsInlineReturnTarget(owner->current_block());
      if_false->MarkAsInlineReturnTarget(owner->current_block());
      TestContext* outer_test_context = TestContext::cast(owner->ast_context());
      Expression* cond = outer_test_context->condition();
      // The AstContext constructor pushed on the context stack.  This newed
      // instance is the reason that AstContext can't be BASE_EMBEDDED.
      test_context_ = new TestContext(owner, cond, if_true, if_false);
    } else {
      function_return_ = owner->graph()->CreateBasicBlock();
      function_return()->MarkAsInlineReturnTarget(owner->current_block());
    }
    // Set this after possibly allocating a new TestContext above.
    call_context_ = owner->ast_context();
  }

  // Push on the state stack.
  owner->set_function_state(this);

  if (owner->is_tracking_positions()) {
    // Remember the outer position so the destructor can restore it.
    outer_source_position_ = owner->source_position();
    owner->EnterInlinedSource(inlining_id);
    owner->SetSourcePosition(info->shared_info()->start_position());
  }
}
   3700 
   3701 
// Pops this state off the builder's function-state stack and restores the
// caller's source-position tracking.
FunctionState::~FunctionState() {
  delete test_context_;
  owner_->set_function_state(outer_);

  if (owner_->is_tracking_positions()) {
    // NOTE(review): this dereferences outer_; presumably positions are only
    // tracked while an outer state exists -- confirm for the outermost state.
    owner_->set_source_position(outer_source_position_);
    owner_->EnterInlinedSource(outer_->inlining_id());
  }
}
   3711 
   3712 
// Implementation of utility classes to represent an expression's context in
// the AST.
AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
    : owner_(owner),
      kind_(kind),
      outer_(owner->ast_context()),
      typeof_mode_(NOT_INSIDE_TYPEOF) {
  owner->set_ast_context(this);  // Push.
#ifdef DEBUG
  // Record the environment height so subclass destructors can check the
  // expected stack effect (0 for effect contexts, +1 for value contexts).
  DCHECK_EQ(JS_FUNCTION, owner->environment()->frame_type());
  original_length_ = owner->environment()->length();
#endif
}
   3726 
   3727 
// Restores the outer expression context on the builder.
AstContext::~AstContext() {
  owner_->set_ast_context(outer_);  // Pop.
}
   3731 
   3732 
// An effect context must leave the environment height unchanged -- unless the
// builder bailed out or the current block became unreachable.
EffectContext::~EffectContext() {
  DCHECK(owner()->HasStackOverflow() || owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ &&
          (owner()->environment()->frame_type() == JS_FUNCTION ||
           owner()->environment()->frame_type() == TAIL_CALLER_FUNCTION)));
}
   3739 
   3740 
// A value context must have pushed exactly one value onto the environment --
// unless the builder bailed out or the current block became unreachable.
ValueContext::~ValueContext() {
  DCHECK(owner()->HasStackOverflow() || owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ + 1 &&
          (owner()->environment()->frame_type() == JS_FUNCTION ||
           owner()->environment()->frame_type() == TAIL_CALLER_FUNCTION)));
}
   3747 
   3748 
// In an effect context the result of the expression is not needed.
void EffectContext::ReturnValue(HValue* value) {
  // The value is simply ignored.
}
   3752 
   3753 
   3754 void ValueContext::ReturnValue(HValue* value) {
   3755   // The value is tracked in the bailout environment, and communicated
   3756   // through the environment as the result of the expression.
   3757   if (value->CheckFlag(HValue::kIsArguments)) {
   3758     if (flag_ == ARGUMENTS_FAKED) {
   3759       value = owner()->graph()->GetConstantUndefined();
   3760     } else if (!arguments_allowed()) {
   3761       owner()->Bailout(kBadValueContextForArgumentsValue);
   3762     }
   3763   }
   3764   owner()->Push(value);
   3765 }
   3766 
   3767 
// In a test context the value is consumed by branching on its truthiness.
void TestContext::ReturnValue(HValue* value) {
  BuildBranch(value);
}
   3771 
   3772 
   3773 void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
   3774   DCHECK(!instr->IsControlInstruction());
   3775   owner()->AddInstruction(instr);
   3776   if (instr->HasObservableSideEffects()) {
   3777     owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
   3778   }
   3779 }
   3780 
   3781 
// Hook a control instruction into the graph in an effect context: both
// outcomes immediately join again because the boolean value is not needed.
void EffectContext::ReturnControl(HControlInstruction* instr,
                                  BailoutId ast_id) {
  DCHECK(!instr->HasObservableSideEffects());
  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, empty_true);
  instr->SetSuccessorAt(1, empty_false);
  owner()->FinishCurrentBlock(instr);
  // The empty blocks keep the graph in edge-split form.
  HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
  owner()->set_current_block(join);
}
   3793 
   3794 
// Resume after a continuation in an effect context: join the reachable
// branches (or fall through the single reachable one) and discard any value.
void EffectContext::ReturnContinuation(HIfContinuation* continuation,
                                       BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch);
  if (!continuation->IsTrueReachable()) {
    owner()->set_current_block(false_branch);
  } else if (!continuation->IsFalseReachable()) {
    owner()->set_current_block(true_branch);
  } else {
    // Both arms are reachable: merge them into a fresh join block.
    HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
    owner()->set_current_block(join);
  }
}
   3809 
   3810 
   3811 void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
   3812   DCHECK(!instr->IsControlInstruction());
   3813   if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
   3814     return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
   3815   }
   3816   owner()->AddInstruction(instr);
   3817   owner()->Push(instr);
   3818   if (instr->HasObservableSideEffects()) {
   3819     owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
   3820   }
   3821 }
   3822 
   3823 
// Materialize the boolean result of a control instruction in a value context:
// push the true/false constants in separate blocks and join them.
void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->HasObservableSideEffects());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
  HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, materialize_true);
  instr->SetSuccessorAt(1, materialize_false);
  owner()->FinishCurrentBlock(instr);
  // Each successor pushes its constant into its own environment copy; the
  // join then carries a single merged value.
  owner()->set_current_block(materialize_true);
  owner()->Push(owner()->graph()->GetConstantTrue());
  owner()->set_current_block(materialize_false);
  owner()->Push(owner()->graph()->GetConstantFalse());
  HBasicBlock* join =
    owner()->CreateJoin(materialize_true, materialize_false, ast_id);
  owner()->set_current_block(join);
}
   3842 
   3843 
   3844 void ValueContext::ReturnContinuation(HIfContinuation* continuation,
   3845                                       BailoutId ast_id) {
   3846   HBasicBlock* materialize_true = NULL;
   3847   HBasicBlock* materialize_false = NULL;
   3848   continuation->Continue(&materialize_true, &materialize_false);
   3849   if (continuation->IsTrueReachable()) {
   3850     owner()->set_current_block(materialize_true);
   3851     owner()->Push(owner()->graph()->GetConstantTrue());
   3852     owner()->set_current_block(materialize_true);
   3853   }
   3854   if (continuation->IsFalseReachable()) {
   3855     owner()->set_current_block(materialize_false);
   3856     owner()->Push(owner()->graph()->GetConstantFalse());
   3857     owner()->set_current_block(materialize_false);
   3858   }
   3859   if (continuation->TrueAndFalseReachable()) {
   3860     HBasicBlock* join =
   3861         owner()->CreateJoin(materialize_true, materialize_false, ast_id);
   3862     owner()->set_current_block(join);
   3863   }
   3864 }
   3865 
   3866 
// Emit |instr| in a test context and branch on its value.
void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->IsControlInstruction());
  HOptimizedGraphBuilder* builder = owner();
  builder->AddInstruction(instr);
  // We expect a simulate after every expression with side effects, though
  // this one isn't actually needed (and wouldn't work if it were targeted).
  if (instr->HasObservableSideEffects()) {
    // Temporarily push the value so the simulate captures a consistent
    // environment, then pop it again before branching.
    builder->Push(instr);
    builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    builder->Pop();
  }
  BuildBranch(instr);
}
   3880 
   3881 
// Wire a control instruction directly to the test context's target blocks,
// going through empty blocks to keep the graph in edge-split form.
void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->HasObservableSideEffects());
  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, empty_true);
  instr->SetSuccessorAt(1, empty_false);
  owner()->FinishCurrentBlock(instr);
  owner()->Goto(empty_true, if_true(), owner()->function_state());
  owner()->Goto(empty_false, if_false(), owner()->function_state());
  // Control continues in the test's target blocks, not here.
  owner()->set_current_block(NULL);
}
   3893 
   3894 
// Forward a continuation's reachable branches to the test context's
// true/false target blocks.
void TestContext::ReturnContinuation(HIfContinuation* continuation,
                                     BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch);
  if (continuation->IsTrueReachable()) {
    owner()->Goto(true_branch, if_true(), owner()->function_state());
  }
  if (continuation->IsFalseReachable()) {
    owner()->Goto(false_branch, if_false(), owner()->function_state());
  }
  // Control continues in the test's target blocks, not here.
  owner()->set_current_block(NULL);
}
   3908 
   3909 
void TestContext::BuildBranch(HValue* value) {
  // We expect the graph to be in edge-split form: there is no edge that
  // connects a branch node to a join node.  We conservatively ensure that
  // property by always adding an empty block on the outgoing edges of this
  // branch.
  HOptimizedGraphBuilder* builder = owner();
  if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
    builder->Bailout(kArgumentsObjectValueInATestContext);
  }
  // Seed the branch with the recorded type feedback for the condition.
  ToBooleanHints expected(condition()->to_boolean_types());
  ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
}
   3922 
   3923 
// HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.
// Execute |call| and return from the current (void) function if it triggered
// a bailout, which is signalled via the stack-overflow flag.
#define CHECK_BAILOUT(call)                     \
  do {                                          \
    call;                                       \
    if (HasStackOverflow()) return;             \
  } while (false)


// Like CHECK_BAILOUT, but additionally return if the current block became
// unreachable (e.g. the visited code ended in a jump or bailed out).
#define CHECK_ALIVE(call)                                       \
  do {                                                          \
    call;                                                       \
    if (HasStackOverflow() || current_block() == NULL) return;  \
  } while (false)


// Like CHECK_ALIVE, for use in functions that must return |value| on failure.
#define CHECK_ALIVE_OR_RETURN(call, value)                            \
  do {                                                                \
    call;                                                             \
    if (HasStackOverflow() || current_block() == NULL) return value;  \
  } while (false)
   3944 
   3945 
// Abort optimization with |reason| and set the stack-overflow flag, which is
// what the CHECK_* macros test to unwind the AST visitor.
void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
  current_info()->AbortOptimization(reason);
  SetStackOverflow();
}
   3950 
   3951 
// Visit |expr| for its side effects only; its value is discarded.
void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
  EffectContext for_effect(this);
  Visit(expr);
}
   3956 
   3957 
// Visit |expr| for its value, leaving the result on the environment stack.
// |flag| controls whether an arguments-object value is acceptable here.
void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
                                           ArgumentsAllowedFlag flag) {
  ValueContext for_value(this, flag);
  Visit(expr);
}
   3963 
   3964 
// Visit |expr| as the operand of typeof: like VisitForValue but marked so
// that unresolved references do not throw.
void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
  for_value.set_typeof_mode(INSIDE_TYPEOF);
  Visit(expr);
}
   3970 
   3971 
// Visit |expr| as a branch condition, routing control to |true_block| or
// |false_block| according to its boolean value.
void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
                                             HBasicBlock* true_block,
                                             HBasicBlock* false_block) {
  TestContext for_control(this, expr, true_block, false_block);
  Visit(expr);
}
   3978 
   3979 
// Visit each expression for its value, stopping early on bailout or if the
// current block becomes unreachable.
void HOptimizedGraphBuilder::VisitExpressions(
    ZoneList<Expression*>* exprs) {
  for (int i = 0; i < exprs->length(); ++i) {
    CHECK_ALIVE(VisitForValue(exprs->at(i)));
  }
}
   3986 
   3987 
// As above, but with explicit control over whether arguments-object values
// are allowed.
void HOptimizedGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs,
                                              ArgumentsAllowedFlag flag) {
  for (int i = 0; i < exprs->length(); ++i) {
    CHECK_ALIVE(VisitForValue(exprs->at(i), flag));
  }
}
   3994 
   3995 
// Translate the function's AST into the Hydrogen graph.  Returns false if
// translation bailed out (the bailout reason is recorded on the
// CompilationInfo).
bool HOptimizedGraphBuilder::BuildGraph() {
  if (IsSubclassConstructor(current_info()->literal()->kind())) {
    // Subclass constructors (super) are not supported by Crankshaft.
    Bailout(kSuperReference);
    return false;
  }

  DeclarationScope* scope = current_info()->scope();
  SetUpScope(scope);

  // Add an edge to the body entry.  This is warty: the graph's start
  // environment will be used by the Lithium translation as the initial
  // environment on graph entry, but it has now been mutated by the
  // Hydrogen translation of the instructions in the start block.  This
  // environment uses values which have not been defined yet.  These
  // Hydrogen instructions will then be replayed by the Lithium
  // translation, so they cannot have an environment effect.  The edge to
  // the body's entry block (along with some special logic for the start
  // block in HInstruction::InsertAfter) seals the start block from
  // getting unwanted instructions inserted.
  //
  // TODO(kmillikin): Fix this.  Stop mutating the initial environment.
  // Make the Hydrogen instructions in the initial block into Hydrogen
  // values (but not instructions), present in the initial environment and
  // not replayed by the Lithium translation.
  HEnvironment* initial_env = environment()->CopyWithoutHistory();
  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
  Goto(body_entry);
  body_entry->SetJoinId(BailoutId::FunctionEntry());
  set_current_block(body_entry);

  VisitDeclarations(scope->declarations());
  Add<HSimulate>(BailoutId::Declarations());

  Add<HStackCheck>(HStackCheck::kFunctionEntry);

  VisitStatements(current_info()->literal()->body());
  if (HasStackOverflow()) return false;

  if (current_block() != NULL) {
    // Fell off the end of the body: implicitly return undefined.
    Add<HReturn>(graph()->GetConstantUndefined());
    set_current_block(NULL);
  }

  // If the checksum of the number of type info changes is the same as the
  // last time this function was compiled, then this recompile is likely not
  // due to missing/inadequate type feedback, but rather too aggressive
  // optimization. Disable optimistic LICM in that case.
  Handle<Code> unoptimized_code(current_info()->shared_info()->code());
  DCHECK(unoptimized_code->kind() == Code::FUNCTION);
  Handle<TypeFeedbackInfo> type_info(
      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
  int checksum = type_info->own_type_change_checksum();
  int composite_checksum = graph()->update_type_change_checksum(checksum);
  graph()->set_use_optimistic_licm(
      !type_info->matches_inlined_type_change_checksum(composite_checksum));
  type_info->set_inlined_type_change_checksum(composite_checksum);

  // Set this predicate early to avoid handle deref during graph optimization.
  graph()->set_allow_code_motion(
      current_info()->IsStub() ||
      current_info()->shared_info()->opt_count() + 1 < FLAG_max_opt_count);

  // Perform any necessary OSR-specific cleanups or changes to the graph.
  osr()->FinishGraph();

  return true;
}
   4063 
   4064 
// Run the Hydrogen optimization pipeline over the graph.  The phase order
// below is significant; several passes document their ordering constraints.
// Returns false (with *bailout_reason set) on unsupported phi uses.
bool HGraph::Optimize(BailoutReason* bailout_reason) {
  OrderBlocks();
  AssignDominators();

  // We need to create a HConstant "zero" now so that GVN will fold every
  // zero-valued constant in the graph together.
  // The constant is needed to make idef-based bounds check work: the pass
  // evaluates relations with "zero" and that zero cannot be created after GVN.
  GetConstant0();

#ifdef DEBUG
  // Do a full verify after building the graph and computing dominators.
  Verify(true);
#endif

  if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
    Run<HEnvironmentLivenessAnalysisPhase>();
  }

  if (!CheckConstPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfConstVariable;
    return false;
  }
  Run<HRedundantPhiEliminationPhase>();
  if (!CheckArgumentsPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfArguments;
    return false;
  }

  // Find and mark unreachable code to simplify optimizations, especially gvn,
  // where unreachable code could unnecessarily defeat LICM.
  Run<HMarkUnreachableBlocksPhase>();

  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
  if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();

  if (FLAG_load_elimination) Run<HLoadEliminationPhase>();

  CollectPhis();

  if (has_osr()) osr()->FinishOsrValues();

  Run<HInferRepresentationPhase>();

  // Remove HSimulate instructions that have turned out not to be needed
  // after all by folding them into the following HSimulate.
  // This must happen after inferring representations.
  Run<HMergeRemovableSimulatesPhase>();

  Run<HRepresentationChangesPhase>();

  Run<HInferTypesPhase>();

  // Must be performed before canonicalization to ensure that Canonicalize
  // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
  // zero.
  Run<HUint32AnalysisPhase>();

  if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();

  if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();

  if (FLAG_check_elimination) Run<HCheckEliminationPhase>();

  if (FLAG_store_elimination) Run<HStoreEliminationPhase>();

  Run<HRangeAnalysisPhase>();

  // Eliminate redundant stack checks on backwards branches.
  Run<HStackCheckEliminationPhase>();

  if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
  if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();

  RestoreActualValues();

  // Find unreachable code a second time, GVN and other optimizations may have
  // made blocks unreachable that were previously reachable.
  Run<HMarkUnreachableBlocksPhase>();

  return true;
}
   4148 
   4149 
// After optimization, collapse informative redefinitions so every use refers
// to the actual underlying value again.
void HGraph::RestoreActualValues() {
  HPhase phase("H_Restore actual values", this);

  for (int block_index = 0; block_index < blocks()->length(); block_index++) {
    HBasicBlock* block = blocks()->at(block_index);

#ifdef DEBUG
    // Phis are never redefinitions; sanity-check that invariant.
    for (int i = 0; i < block->phis()->length(); i++) {
      HPhi* phi = block->phis()->at(i);
      DCHECK(phi->ActualValue() == phi);
    }
#endif

    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instruction = it.Current();
      if (instruction->ActualValue() == instruction) continue;
      if (instruction->CheckFlag(HValue::kIsDead)) {
        // The instruction was marked as deleted but left in the graph
        // as a control flow dependency point for subsequent
        // instructions.
        instruction->DeleteAndReplaceWith(instruction->ActualValue());
      } else {
        DCHECK(instruction->IsInformativeDefinition());
        if (instruction->IsPurelyInformativeDefinition()) {
          instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
        } else {
          // Keep the instruction (it has real effects) but redirect its
          // uses to the actual value it redefines.
          instruction->ReplaceAllUsesWith(instruction->ActualValue());
        }
      }
    }
  }
}
   4182 
   4183 
   4184 void HOptimizedGraphBuilder::PushArgumentsFromEnvironment(int count) {
   4185   ZoneList<HValue*> arguments(count, zone());
   4186   for (int i = 0; i < count; ++i) {
   4187     arguments.Add(Pop(), zone());
   4188   }
   4189 
   4190   HPushArguments* push_args = New<HPushArguments>();
   4191   while (!arguments.is_empty()) {
   4192     push_args->AddInput(arguments.RemoveLast());
   4193   }
   4194   AddInstruction(push_args);
   4195 }
   4196 
   4197 
// Pop the call's arguments from the environment and emit them as explicit
// push-arguments instructions ahead of the call itself.
template <class Instruction>
HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
  PushArgumentsFromEnvironment(call->argument_count());
  return call;
}
   4203 
// Build the function prologue: bind parameters, initialize locals to
// undefined, set up the context, and create the arguments object.  Bails out
// on features Crankshaft does not support (rest parameters, super).
void HOptimizedGraphBuilder::SetUpScope(DeclarationScope* scope) {
  HEnvironment* prolog_env = environment();
  int parameter_count = environment()->parameter_count();
  ZoneList<HValue*> parameters(parameter_count, zone());
  for (int i = 0; i < parameter_count; ++i) {
    HInstruction* parameter = Add<HParameter>(static_cast<unsigned>(i));
    parameters.Add(parameter, zone());
    environment()->Bind(i, parameter);
  }

  HConstant* undefined_constant = graph()->GetConstantUndefined();
  // Initialize specials and locals to undefined.
  for (int i = parameter_count + 1; i < environment()->length(); ++i) {
    environment()->Bind(i, undefined_constant);
  }
  Add<HPrologue>();

  // Seal the prologue into its own block (see the comment in BuildGraph
  // about mutating the start environment).
  HEnvironment* initial_env = environment()->CopyWithoutHistory();
  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
  GotoNoSimulate(body_entry);
  set_current_block(body_entry);

  // Initialize context of prolog environment to undefined.
  prolog_env->BindContext(undefined_constant);

  // First special is HContext.
  HInstruction* context = Add<HContext>();
  environment()->BindContext(context);

  // Create an arguments object containing the initial parameters.  Set the
  // initial values of parameters including "this" having parameter index 0.
  DCHECK_EQ(scope->num_parameters() + 1, parameter_count);
  HArgumentsObject* arguments_object = New<HArgumentsObject>(parameter_count);
  for (int i = 0; i < parameter_count; ++i) {
    HValue* parameter = parameters.at(i);
    arguments_object->AddArgument(parameter, zone());
  }

  AddInstruction(arguments_object);

  // Handle the arguments and arguments shadow variables specially (they do
  // not have declarations).
  if (scope->arguments() != NULL) {
    environment()->Bind(scope->arguments(), arguments_object);
  }

  if (scope->rest_parameter() != nullptr) {
    return Bailout(kRestParameter);
  }

  if (scope->this_function_var() != nullptr ||
      scope->new_target_var() != nullptr) {
    return Bailout(kSuperReference);
  }

  // Trace the call.
  if (FLAG_trace && top_info()->IsOptimizing()) {
    Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kTraceEnter), 0);
  }
}
   4264 
   4265 
// Visit statements in order, stopping at an unconditional jump (break,
// continue, return, throw) since subsequent statements are unreachable.
void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
  for (int i = 0; i < statements->length(); i++) {
    Statement* stmt = statements->at(i);
    CHECK_ALIVE(Visit(stmt));
    if (stmt->IsJump()) break;
  }
}
   4273 
   4274 
// Translate a block statement, allocating and entering a block context when
// the block's scope needs one, and popping it again on the way out.
void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  Scope* outer_scope = scope();
  Scope* scope = stmt->scope();
  BreakAndContinueInfo break_info(stmt, outer_scope);

  { BreakAndContinueScope push(&break_info, this);
    if (scope != NULL) {
      if (scope->NeedsContext()) {
        // Load the function object.
        DeclarationScope* declaration_scope = scope->GetDeclarationScope();
        HInstruction* function;
        HValue* outer_context = environment()->context();
        if (declaration_scope->is_script_scope() ||
            declaration_scope->is_eval_scope()) {
          function = new (zone())
              HLoadContextSlot(outer_context, Context::CLOSURE_INDEX,
                               HLoadContextSlot::kNoCheck);
        } else {
          function = New<HThisFunction>();
        }
        AddInstruction(function);
        // Allocate a block context and store it to the stack frame.
        HValue* scope_info = Add<HConstant>(scope->scope_info());
        Add<HPushArguments>(scope_info, function);
        HInstruction* inner_context = Add<HCallRuntime>(
            Runtime::FunctionForId(Runtime::kPushBlockContext), 2);
        inner_context->SetFlag(HValue::kHasNoObservableSideEffects);
        set_scope(scope);
        environment()->BindContext(inner_context);
      }
      VisitDeclarations(scope->declarations());
      AddSimulate(stmt->DeclsId(), REMOVABLE_SIMULATE);
    }
    CHECK_BAILOUT(VisitStatements(stmt->statements()));
  }
  set_scope(outer_scope);
  if (scope != NULL && current_block() != NULL &&
      scope->ContextLocalCount() > 0) {
    // Pop the block context by reloading the previous context slot.
    HValue* inner_context = environment()->context();
    HValue* outer_context = Add<HLoadNamedField>(
        inner_context, nullptr,
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));

    environment()->BindContext(outer_context);
  }
  HBasicBlock* break_block = break_info.break_block();
  if (break_block != NULL) {
    // A break targeted this block: merge the fall-through edge into it.
    if (current_block() != NULL) Goto(break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}
   4331 
   4332 
// An expression statement evaluates its expression for effect only.
void HOptimizedGraphBuilder::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  VisitForEffect(stmt->expression());
}
   4340 
   4341 
// An empty statement generates no code.
void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
}
   4347 
   4348 
// A sloppy-mode block-scoped function declaration just wraps an inner
// statement; visit it directly.
void HOptimizedGraphBuilder::VisitSloppyBlockFunctionStatement(
    SloppyBlockFunctionStatement* stmt) {
  Visit(stmt->statement());
}
   4353 
   4354 
// Translate an if statement.  Statically-known conditions visit only the
// taken arm; otherwise both arms are built and joined.
void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (stmt->condition()->ToBooleanIsTrue()) {
    Add<HSimulate>(stmt->ThenId());
    Visit(stmt->then_statement());
  } else if (stmt->condition()->ToBooleanIsFalse()) {
    Add<HSimulate>(stmt->ElseId());
    Visit(stmt->else_statement());
  } else {
    HBasicBlock* cond_true = graph()->CreateBasicBlock();
    HBasicBlock* cond_false = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));

    // Technically, we should be able to handle the case when one side of
    // the test is not connected, but this can trip up liveness analysis
    // if we did not fully connect the test context based on some optimistic
    // assumption. If such an assumption was violated, we would end up with
    // an environment with optimized-out values. So we should always
    // conservatively connect the test context.
    CHECK(cond_true->HasPredecessor());
    CHECK(cond_false->HasPredecessor());

    cond_true->SetJoinId(stmt->ThenId());
    set_current_block(cond_true);
    CHECK_BAILOUT(Visit(stmt->then_statement()));
    cond_true = current_block();

    cond_false->SetJoinId(stmt->ElseId());
    set_current_block(cond_false);
    CHECK_BAILOUT(Visit(stmt->else_statement()));
    cond_false = current_block();

    // Either arm may be NULL here if it ended in a jump; CreateJoin handles
    // the partially-connected cases.
    HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
    set_current_block(join);
  }
}
   4393 
   4394 
// Find the break/continue target block for |stmt|, walking outward through
// the enclosing break scopes.  On return, *scope is the target's scope and
// *drop_extra is the number of environment values to drop before jumping.
HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
    BreakableStatement* stmt,
    BreakType type,
    Scope** scope,
    int* drop_extra) {
  *drop_extra = 0;
  BreakAndContinueScope* current = this;
  while (current != NULL && current->info()->target() != stmt) {
    *drop_extra += current->info()->drop_extra();
    current = current->next();
  }
  DCHECK(current != NULL);  // Always found (unless stack is malformed).
  *scope = current->info()->scope();

  if (type == BREAK) {
    // A break leaves the target construct entirely, so also drop its own
    // extra values; a continue stays inside it.
    *drop_extra += current->info()->drop_extra();
  }

  // Lazily create the target block on first use.
  HBasicBlock* block = NULL;
  switch (type) {
    case BREAK:
      block = current->info()->break_block();
      if (block == NULL) {
        block = current->owner()->graph()->CreateBasicBlock();
        current->info()->set_break_block(block);
      }
      break;

    case CONTINUE:
      block = current->info()->continue_block();
      if (block == NULL) {
        block = current->owner()->graph()->CreateBasicBlock();
        current->info()->set_continue_block(block);
      }
      break;
  }

  return block;
}
   4434 
   4435 
// Translate a continue statement: unwind any intervening contexts and extra
// environment values, then jump to the loop's continue block.
void HOptimizedGraphBuilder::VisitContinueStatement(
    ContinueStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  if (function_state()->IsInsideDoExpressionScope()) {
    return Bailout(kDoExpressionUnmodelable);
  }

  Scope* outer_scope = NULL;
  Scope* inner_scope = scope();
  int drop_extra = 0;
  HBasicBlock* continue_block = break_scope()->Get(
      stmt->target(), BreakAndContinueScope::CONTINUE,
      &outer_scope, &drop_extra);
  HValue* context = environment()->context();
  Drop(drop_extra);
  // Pop block contexts until we are back at the target's scope depth.
  int context_pop_count = inner_scope->ContextChainLength(outer_scope);
  if (context_pop_count > 0) {
    while (context_pop_count-- > 0) {
      HInstruction* context_instruction = Add<HLoadNamedField>(
          context, nullptr,
          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
      context = context_instruction;
    }
    environment()->BindContext(context);
  }

  Goto(continue_block);
  set_current_block(NULL);
}
   4468 
   4469 
   4470 void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
   4471   DCHECK(!HasStackOverflow());
   4472   DCHECK(current_block() != NULL);
   4473   DCHECK(current_block()->HasPredecessor());
   4474 
   4475   if (function_state()->IsInsideDoExpressionScope()) {
   4476     return Bailout(kDoExpressionUnmodelable);
   4477   }
   4478 
   4479   Scope* outer_scope = NULL;
   4480   Scope* inner_scope = scope();
   4481   int drop_extra = 0;
   4482   HBasicBlock* break_block = break_scope()->Get(
   4483       stmt->target(), BreakAndContinueScope::BREAK,
   4484       &outer_scope, &drop_extra);
   4485   HValue* context = environment()->context();
   4486   Drop(drop_extra);
   4487   int context_pop_count = inner_scope->ContextChainLength(outer_scope);
   4488   if (context_pop_count > 0) {
   4489     while (context_pop_count-- > 0) {
   4490       HInstruction* context_instruction = Add<HLoadNamedField>(
   4491           context, nullptr,
   4492           HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
   4493       context = context_instruction;
   4494     }
   4495     environment()->BindContext(context);
   4496   }
   4497   Goto(break_block);
   4498   set_current_block(NULL);
   4499 }
   4500 
   4501 
// Translates a return statement. A return from the outermost (non-inlined)
// function emits an actual HReturn; a return from an inlined function
// instead routes the value to the call site according to the inlining kind
// and the AST context of the call (test / effect / value).
void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  FunctionState* state = function_state();
  AstContext* context = call_context();
  if (context == NULL) {
    // Not an inlined return, so an actual one.
    CHECK_ALIVE(VisitForValue(stmt->expression()));
    HValue* result = environment()->Pop();
    Add<HReturn>(result);
  } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
    // Return from an inlined construct call. In a test context the return value
    // will always evaluate to true, in a value context the return value needs
    // to be a JSObject.
    if (context->IsTest()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      context->ReturnValue(graph()->GetConstantTrue());
    } else if (context->IsEffect()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      // If the returned value is not a JS receiver, the construct call
      // produces the receiver instead (per JS constructor semantics);
      // branch on the instance type and leave the inlined function with
      // whichever value applies.
      HValue* return_value = Pop();
      HValue* receiver = environment()->arguments_environment()->Lookup(0);
      HHasInstanceTypeAndBranch* typecheck =
          New<HHasInstanceTypeAndBranch>(return_value,
                                         FIRST_JS_RECEIVER_TYPE,
                                         LAST_JS_RECEIVER_TYPE);
      HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
      HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
      typecheck->SetSuccessorAt(0, if_spec_object);
      typecheck->SetSuccessorAt(1, not_spec_object);
      FinishCurrentBlock(typecheck);
      AddLeaveInlined(if_spec_object, return_value, state);
      AddLeaveInlined(not_spec_object, receiver, state);
    }
  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
    // Return from an inlined setter call. The returned value is never used, the
    // value of an assignment is always the value of the RHS of the assignment.
    CHECK_ALIVE(VisitForEffect(stmt->expression()));
    if (context->IsTest()) {
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      context->ReturnValue(rhs);
    } else if (context->IsEffect()) {
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      AddLeaveInlined(rhs, state);
    }
  } else {
    // Return from a normal inlined function. Visit the subexpression in the
    // expression context of the call.
    if (context->IsTest()) {
      TestContext* test = TestContext::cast(context);
      VisitForControl(stmt->expression(), test->if_true(), test->if_false());
    } else if (context->IsEffect()) {
      // Visit in value context and ignore the result. This is needed to keep
      // environment in sync with full-codegen since some visitors (e.g.
      // VisitCountOperation) use the operand stack differently depending on
      // context.
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      Pop();
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      AddLeaveInlined(Pop(), state);
    }
  }
  // Control flow never falls through a return.
  set_current_block(NULL);
}
   4576 
   4577 
// With statements are not supported by this optimizing compiler; bail out
// so the function stays in unoptimized code.
void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kWithStatement);
}
   4584 
   4585 
// Translates a switch statement in two passes: first a chain of strict-
// equality compares against the tag (one per non-default clause, each with
// a dangling body block), then the clause bodies, joining each body's
// fall-through edge with the next clause's normal entry. The default
// clause, if any, receives the block left after the last test.
void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  ZoneList<CaseClause*>* clauses = stmt->cases();
  int clause_count = clauses->length();
  ZoneList<HBasicBlock*> body_blocks(clause_count, zone());

  // Keep the tag on the expression stack while the tests are built.
  CHECK_ALIVE(VisitForValue(stmt->tag()));
  Add<HSimulate>(stmt->EntryId());
  HValue* tag_value = Top();
  AstType* tag_type = bounds_.get(stmt->tag()).lower;

  // 1. Build all the tests, with dangling true branches
  // NOTE(review): default_id is recorded here but not read again in this
  // function — appears vestigial; confirm before removing.
  BailoutId default_id = BailoutId::None();
  for (int i = 0; i < clause_count; ++i) {
    CaseClause* clause = clauses->at(i);
    if (clause->is_default()) {
      // Default clauses get no test; a NULL placeholder keeps body_blocks
      // index-aligned with the clause list.
      body_blocks.Add(NULL, zone());
      if (default_id.IsNone()) default_id = clause->EntryId();
      continue;
    }

    // Generate a compare and branch.
    CHECK_BAILOUT(VisitForValue(clause->label()));
    if (current_block() == NULL) return Bailout(kUnsupportedSwitchStatement);
    HValue* label_value = Pop();

    AstType* label_type = bounds_.get(clause->label()).lower;
    AstType* combined_type = clause->compare_type();
    HControlInstruction* compare = BuildCompareInstruction(
        Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
        combined_type,
        ScriptPositionToSourcePosition(stmt->tag()->position()),
        ScriptPositionToSourcePosition(clause->label()->position()),
        PUSH_BEFORE_SIMULATE, clause->id());

    HBasicBlock* next_test_block = graph()->CreateBasicBlock();
    HBasicBlock* body_block = graph()->CreateBasicBlock();
    body_blocks.Add(body_block, zone());
    compare->SetSuccessorAt(0, body_block);
    compare->SetSuccessorAt(1, next_test_block);
    FinishCurrentBlock(compare);

    // The body no longer needs the tag; drop it on the taken path.
    set_current_block(body_block);
    Drop(1);  // tag_value

    set_current_block(next_test_block);
  }

  // Save the current block to use for the default or to join with the
  // exit.
  HBasicBlock* last_block = current_block();
  Drop(1);  // tag_value

  // 2. Loop over the clauses and the linked list of tests in lockstep,
  // translating the clause bodies.
  HBasicBlock* fall_through_block = NULL;

  BreakAndContinueInfo break_info(stmt, scope());
  { BreakAndContinueScope push(&break_info, this);
    for (int i = 0; i < clause_count; ++i) {
      CaseClause* clause = clauses->at(i);

      // Identify the block where normal (non-fall-through) control flow
      // goes to.
      HBasicBlock* normal_block = NULL;
      if (clause->is_default()) {
        if (last_block == NULL) continue;
        normal_block = last_block;
        last_block = NULL;  // Cleared to indicate we've handled it.
      } else {
        normal_block = body_blocks[i];
      }

      if (fall_through_block == NULL) {
        set_current_block(normal_block);
      } else {
        HBasicBlock* join = CreateJoin(fall_through_block,
                                       normal_block,
                                       clause->EntryId());
        set_current_block(join);
      }

      CHECK_BAILOUT(VisitStatements(clause->statements()));
      fall_through_block = current_block();
    }
  }

  // Create an up-to-3-way join.  Use the break block if it exists since
  // it's already a join block.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block == NULL) {
    set_current_block(CreateJoin(fall_through_block,
                                 last_block,
                                 stmt->ExitId()));
  } else {
    if (fall_through_block != NULL) Goto(fall_through_block, break_block);
    if (last_block != NULL) Goto(last_block, break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}
   4690 
// Visits a loop body, first installing a backwards-branch stack check so
// long-running loops can be interrupted. |loop_entry| must already be a
// loop header; the stack check is recorded on its loop information so a
// later pass can eliminate it (see the constant-false do-while case).
void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
                                           BailoutId stack_check_id,
                                           HBasicBlock* loop_entry) {
  Add<HSimulate>(stack_check_id);
  HStackCheck* stack_check =
      HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
  DCHECK(loop_entry->IsLoopHeader());
  loop_entry->loop_information()->set_stack_check(stack_check);
  CHECK_BAILOUT(Visit(stmt->body()));
}
   4701 
   4702 
   4703 void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
   4704   DCHECK(!HasStackOverflow());
   4705   DCHECK(current_block() != NULL);
   4706   DCHECK(current_block()->HasPredecessor());
   4707   DCHECK(current_block() != NULL);
   4708   HBasicBlock* loop_entry = BuildLoopEntry(stmt);
   4709 
   4710   BreakAndContinueInfo break_info(stmt, scope());
   4711   {
   4712     BreakAndContinueScope push(&break_info, this);
   4713     CHECK_BAILOUT(VisitLoopBody(stmt, stmt->StackCheckId(), loop_entry));
   4714   }
   4715   HBasicBlock* body_exit = JoinContinue(
   4716       stmt, stmt->ContinueId(), current_block(), break_info.continue_block());
   4717   HBasicBlock* loop_successor = NULL;
   4718   if (body_exit != NULL) {
   4719     set_current_block(body_exit);
   4720     loop_successor = graph()->CreateBasicBlock();
   4721     if (stmt->cond()->ToBooleanIsFalse()) {
   4722       loop_entry->loop_information()->stack_check()->Eliminate();
   4723       Goto(loop_successor);
   4724       body_exit = NULL;
   4725     } else {
   4726       // The block for a true condition, the actual predecessor block of the
   4727       // back edge.
   4728       body_exit = graph()->CreateBasicBlock();
   4729       CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
   4730     }
   4731     if (body_exit != NULL && body_exit->HasPredecessor()) {
   4732       body_exit->SetJoinId(stmt->BackEdgeId());
   4733     } else {
   4734       body_exit = NULL;
   4735     }
   4736     if (loop_successor->HasPredecessor()) {
   4737       loop_successor->SetJoinId(stmt->ExitId());
   4738     } else {
   4739       loop_successor = NULL;
   4740     }
   4741   }
   4742   HBasicBlock* loop_exit = CreateLoop(stmt,
   4743                                       loop_entry,
   4744                                       body_exit,
   4745                                       loop_successor,
   4746                                       break_info.break_block());
   4747   set_current_block(loop_exit);
   4748 }
   4749 
   4750 
   4751 void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
   4752   DCHECK(!HasStackOverflow());
   4753   DCHECK(current_block() != NULL);
   4754   DCHECK(current_block()->HasPredecessor());
   4755   DCHECK(current_block() != NULL);
   4756   HBasicBlock* loop_entry = BuildLoopEntry(stmt);
   4757 
   4758   // If the condition is constant true, do not generate a branch.
   4759   HBasicBlock* loop_successor = NULL;
   4760   HBasicBlock* body_entry = graph()->CreateBasicBlock();
   4761   loop_successor = graph()->CreateBasicBlock();
   4762   CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
   4763   if (body_entry->HasPredecessor()) {
   4764     body_entry->SetJoinId(stmt->BodyId());
   4765     set_current_block(body_entry);
   4766   }
   4767   if (loop_successor->HasPredecessor()) {
   4768     loop_successor->SetJoinId(stmt->ExitId());
   4769   } else {
   4770     loop_successor = NULL;
   4771   }
   4772 
   4773   BreakAndContinueInfo break_info(stmt, scope());
   4774   if (current_block() != NULL) {
   4775     BreakAndContinueScope push(&break_info, this);
   4776     CHECK_BAILOUT(VisitLoopBody(stmt, stmt->StackCheckId(), loop_entry));
   4777   }
   4778   HBasicBlock* body_exit = JoinContinue(
   4779       stmt, stmt->ContinueId(), current_block(), break_info.continue_block());
   4780   HBasicBlock* loop_exit = CreateLoop(stmt,
   4781                                       loop_entry,
   4782                                       body_exit,
   4783                                       loop_successor,
   4784                                       break_info.break_block());
   4785   set_current_block(loop_exit);
   4786 }
   4787 
   4788 
// Translates a for statement. The init clause runs once before the loop;
// a missing condition is modeled as a branch on constant true so liveness
// analysis still sees both edges; the |next| clause runs only on the back
// edge, after normal fall-through and |continue| edges have been joined.
void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (stmt->init() != NULL) {
    CHECK_ALIVE(Visit(stmt->init()));
  }
  DCHECK(current_block() != NULL);
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  HBasicBlock* loop_successor = graph()->CreateBasicBlock();
  HBasicBlock* body_entry = graph()->CreateBasicBlock();
  if (stmt->cond() != NULL) {
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
    // Prune branches left without predecessors by a constant condition.
    if (body_entry->HasPredecessor()) {
      body_entry->SetJoinId(stmt->BodyId());
      set_current_block(body_entry);
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;
    }
  } else {
    // Create dummy control flow so that variable liveness analysis
    // produces the correct result.
    HControlInstruction* branch = New<HBranch>(graph()->GetConstantTrue());
    branch->SetSuccessorAt(0, body_entry);
    branch->SetSuccessorAt(1, loop_successor);
    FinishCurrentBlock(branch);
    set_current_block(body_entry);
  }

  BreakAndContinueInfo break_info(stmt, scope());
  if (current_block() != NULL) {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, stmt->StackCheckId(), loop_entry));
  }
  HBasicBlock* body_exit = JoinContinue(
      stmt, stmt->ContinueId(), current_block(), break_info.continue_block());

  // The |next| expression is visited on the joined back-edge block so that
  // |continue| also executes it.
  if (stmt->next() != NULL && body_exit != NULL) {
    set_current_block(body_exit);
    CHECK_BAILOUT(Visit(stmt->next()));
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}
   4843 
   4844 
// Translates a for-in statement. Only a stack-allocated |each| variable is
// supported (anything else bails out). A null or undefined enumerable
// triggers a deoptimization rather than being modeled as an empty loop.
void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  if (!stmt->each()->IsVariableProxy() ||
      !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
    return Bailout(kForInStatementWithNonLocalEachVariable);
  }

  Variable* each_var = stmt->each()->AsVariableProxy()->var();

  CHECK_ALIVE(VisitForValue(stmt->enumerable()));
  HValue* enumerable = Top();  // Leave enumerable at the top.

  // Deopt on null/undefined enumerable instead of modeling the skip.
  IfBuilder if_undefined_or_null(this);
  if_undefined_or_null.If<HCompareObjectEqAndBranch>(
      enumerable, graph()->GetConstantUndefined());
  if_undefined_or_null.Or();
  if_undefined_or_null.If<HCompareObjectEqAndBranch>(
      enumerable, graph()->GetConstantNull());
  if_undefined_or_null.ThenDeopt(DeoptimizeReason::kUndefinedOrNullInForIn);
  if_undefined_or_null.End();
  BuildForInBody(stmt, each_var, enumerable);
}
   4870 
   4871 
// Builds the body of a for-in loop over |enumerable|, binding each
// enumerated key to the stack-local |each_var|.
//
// The loop keeps five bookkeeping values on the expression stack (top to
// bottom): index, limit, key array, type (a map when the enum cache is
// usable, Smi 1 otherwise), and the enumerable itself. In the fast case the
// receiver's enum cache is used directly; otherwise %ForInEnumerate is
// called and each key is re-validated with the ForInFilter stub before the
// body executes.
void HOptimizedGraphBuilder::BuildForInBody(ForInStatement* stmt,
                                            Variable* each_var,
                                            HValue* enumerable) {
  Handle<Map> meta_map = isolate()->factory()->meta_map();
  bool fast = stmt->for_in_type() == ForInStatement::FAST_FOR_IN;
  BuildCheckHeapObject(enumerable);
  Add<HCheckInstanceType>(enumerable, HCheckInstanceType::IS_JS_RECEIVER);
  Add<HSimulate>(stmt->ToObjectId());
  if (fast) {
    // Fast case: the enumerable's map and its enum cache drive the loop.
    HForInPrepareMap* map = Add<HForInPrepareMap>(enumerable);
    Push(map);
    Add<HSimulate>(stmt->EnumId());
    Drop(1);
    Add<HCheckMaps>(map, meta_map);

    HForInCacheArray* array = Add<HForInCacheArray>(
        enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);
    HValue* enum_length = BuildEnumLength(map);

    HForInCacheArray* index_cache = Add<HForInCacheArray>(
        enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
    array->set_index_cache(index_cache);

    Push(map);
    Push(array);
    Push(enum_length);
    Add<HSimulate>(stmt->PrepareId());
  } else {
    Runtime::FunctionId function_id = Runtime::kForInEnumerate;
    Add<HPushArguments>(enumerable);
    HCallRuntime* array =
        Add<HCallRuntime>(Runtime::FunctionForId(function_id), 1);
    Push(array);
    Add<HSimulate>(stmt->EnumId());
    Drop(1);

    // Dispatch on whether %ForInEnumerate returned a map (its own map is
    // the meta map, so the enum cache can be used) or a fixed array of keys.
    IfBuilder if_fast(this);
    if_fast.If<HCompareMap>(array, meta_map);
    if_fast.Then();
    {
      HValue* cache_map = array;
      HForInCacheArray* cache = Add<HForInCacheArray>(
          enumerable, cache_map, DescriptorArray::kEnumCacheBridgeCacheIndex);
      HValue* enum_length = BuildEnumLength(cache_map);
      Push(cache_map);
      Push(cache);
      Push(enum_length);
      Add<HSimulate>(stmt->PrepareId(), FIXED_SIMULATE);
    }
    if_fast.Else();
    {
      // Slow path: Smi 1 in the type slot marks "no usable enum cache".
      Push(graph()->GetConstant1());
      Push(array);
      Push(AddLoadFixedArrayLength(array));
      Add<HSimulate>(stmt->PrepareId(), FIXED_SIMULATE);
    }
  }

  // Loop index starts at zero.
  Push(graph()->GetConstant0());

  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  // Reload the values to ensure we have up-to-date values inside of the loop.
  // This is relevant especially for OSR where the values don't come from the
  // computation above, but from the OSR entry block.
  HValue* index = environment()->ExpressionStackAt(0);
  HValue* limit = environment()->ExpressionStackAt(1);
  HValue* array = environment()->ExpressionStackAt(2);
  HValue* type = environment()->ExpressionStackAt(3);
  enumerable = environment()->ExpressionStackAt(4);

  // Check that we still have more keys.
  HCompareNumericAndBranch* compare_index =
      New<HCompareNumericAndBranch>(index, limit, Token::LT);
  compare_index->set_observed_input_representation(
      Representation::Smi(), Representation::Smi());

  HBasicBlock* loop_body = graph()->CreateBasicBlock();
  HBasicBlock* loop_successor = graph()->CreateBasicBlock();

  compare_index->SetSuccessorAt(0, loop_body);
  compare_index->SetSuccessorAt(1, loop_successor);
  FinishCurrentBlock(compare_index);

  // On exit, pop all five loop-bookkeeping slots.
  set_current_block(loop_successor);
  Drop(5);

  set_current_block(loop_body);

  // Compute the next enumerated value.
  HValue* key = Add<HLoadKeyed>(array, index, index, nullptr, FAST_ELEMENTS);

  HBasicBlock* continue_block = nullptr;
  if (fast) {
    // Check if expected map still matches that of the enumerable.
    Add<HCheckMapValue>(enumerable, type);
    Add<HSimulate>(stmt->FilterId());
  } else {
    // We need the continue block here to be able to skip over invalidated keys.
    continue_block = graph()->CreateBasicBlock();

    // We cannot use the IfBuilder here, since we need to be able to jump
    // over the loop body in case of undefined result from %ForInFilter,
    // and the poor soul that is the IfBuilder get's really confused about
    // such "advanced control flow requirements".
    HBasicBlock* if_fast = graph()->CreateBasicBlock();
    HBasicBlock* if_slow = graph()->CreateBasicBlock();
    HBasicBlock* if_slow_pass = graph()->CreateBasicBlock();
    HBasicBlock* if_slow_skip = graph()->CreateBasicBlock();
    HBasicBlock* if_join = graph()->CreateBasicBlock();

    // Check if expected map still matches that of the enumerable.
    HValue* enumerable_map =
        Add<HLoadNamedField>(enumerable, nullptr, HObjectAccess::ForMap());
    FinishCurrentBlock(
        New<HCompareObjectEqAndBranch>(enumerable_map, type, if_fast, if_slow));
    set_current_block(if_fast);
    {
      // The enum cache for enumerable is still valid, no need to check key.
      Push(key);
      Goto(if_join);
    }
    set_current_block(if_slow);
    {
      // Map changed: filter the key through the ForInFilter stub; an
      // undefined result means the key is no longer present.
      Callable callable = CodeFactory::ForInFilter(isolate());
      HValue* values[] = {key, enumerable};
      HConstant* stub_value = Add<HConstant>(callable.code());
      Push(Add<HCallWithDescriptor>(stub_value, 0, callable.descriptor(),
                                    ArrayVector(values)));
      Add<HSimulate>(stmt->FilterId());
      FinishCurrentBlock(New<HCompareObjectEqAndBranch>(
          Top(), graph()->GetConstantUndefined(), if_slow_skip, if_slow_pass));
    }
    set_current_block(if_slow_pass);
    { Goto(if_join); }
    set_current_block(if_slow_skip);
    {
      // The key is no longer valid for enumerable, skip it.
      Drop(1);
      Goto(continue_block);
    }
    if_join->SetJoinId(stmt->FilterId());
    set_current_block(if_join);
    key = Pop();
  }

  Bind(each_var, key);
  Add<HSimulate>(stmt->AssignmentId());

  // A |break| out of the body must also drop the five bookkeeping slots.
  BreakAndContinueInfo break_info(stmt, scope(), 5);
  break_info.set_continue_block(continue_block);
  {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, stmt->StackCheckId(), loop_entry));
  }

  HBasicBlock* body_exit = JoinContinue(
      stmt, stmt->IncrementId(), current_block(), break_info.continue_block());

  if (body_exit != NULL) {
    set_current_block(body_exit);

    // Advance the index; it is bounded by |limit| via the Smi compare
    // above, so the add cannot overflow.
    HValue* current_index = Pop();
    HValue* increment =
        AddUncasted<HAdd>(current_index, graph()->GetConstant1());
    increment->ClearFlag(HValue::kCanOverflow);
    Push(increment);
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());

  set_current_block(loop_exit);
}
   5050 
   5051 
// For-of statements are not supported by this optimizing compiler; bail out.
void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kForOfStatement);
}
   5058 
   5059 
// Try-catch statements are not supported by this optimizing compiler; bail
// out.
void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kTryCatchStatement);
}
   5066 
   5067 
// Try-finally statements are not supported by this optimizing compiler;
// bail out.
void HOptimizedGraphBuilder::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kTryFinallyStatement);
}
   5075 
   5076 
// Debugger statements are not supported by this optimizing compiler; bail
// out.
void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kDebuggerStatement);
}
   5083 
   5084 
// Case clauses are translated inline by VisitSwitchStatement and are never
// visited through the generic visitor dispatch.
void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}
   5088 
   5089 
   5090 void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
   5091   DCHECK(!HasStackOverflow());
   5092   DCHECK(current_block() != NULL);
   5093   DCHECK(current_block()->HasPredecessor());
   5094   Handle<SharedFunctionInfo> shared_info = Compiler::GetSharedFunctionInfo(
   5095       expr, current_info()->script(), top_info());
   5096   // We also have a stack overflow if the recursive compilation did.
   5097   if (HasStackOverflow()) return;
   5098   // Use the fast case closure allocation code that allocates in new
   5099   // space for nested functions that don't need pretenuring.
   5100   HConstant* shared_info_value = Add<HConstant>(shared_info);
   5101   HInstruction* instr;
   5102   if (!expr->pretenure()) {
   5103     FastNewClosureStub stub(isolate());
   5104     FastNewClosureDescriptor descriptor(isolate());
   5105     HValue* values[] = {shared_info_value};
   5106     HConstant* stub_value = Add<HConstant>(stub.GetCode());
   5107     instr = New<HCallWithDescriptor>(stub_value, 0, descriptor,
   5108                                      ArrayVector(values));
   5109   } else {
   5110     Add<HPushArguments>(shared_info_value);
   5111     Runtime::FunctionId function_id =
   5112         expr->pretenure() ? Runtime::kNewClosure_Tenured : Runtime::kNewClosure;
   5113     instr = New<HCallRuntime>(Runtime::FunctionForId(function_id), 1);
   5114   }
   5115   return ast_context()->ReturnInstruction(instr, expr->id());
   5116 }
   5117 
   5118 
// Class literals are not supported by this optimizing compiler; bail out.
void HOptimizedGraphBuilder::VisitClassLiteral(ClassLiteral* lit) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kClassLiteral);
}
   5125 
   5126 
// Native function literals are not supported by this optimizing compiler;
// bail out.
void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kNativeFunctionLiteral);
}
   5134 
   5135 
// Translates a do-expression: visit the statement block, then the result
// expression in the surrounding AST context. The DoExpressionScope marks
// the region so that break/continue inside it bail out (see
// VisitBreakStatement / VisitContinueStatement).
void HOptimizedGraphBuilder::VisitDoExpression(DoExpression* expr) {
  DoExpressionScope scope(this);
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  CHECK_ALIVE(VisitBlock(expr->block()));
  // Tail visit without CHECK_ALIVE: this is the last statement, so any
  // bailout is observed by the caller's own alive checks.
  Visit(expr->result());
}
   5144 
   5145 
// Translates a conditional (?:) expression. Both arms are visited in the
// same AST context as the whole expression; an arm left without
// predecessors by a constant condition is dropped from the join.
void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  HBasicBlock* cond_true = graph()->CreateBasicBlock();
  HBasicBlock* cond_false = graph()->CreateBasicBlock();
  CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));

  // Visit the true and false subexpressions in the same AST context as the
  // whole expression.
  if (cond_true->HasPredecessor()) {
    cond_true->SetJoinId(expr->ThenId());
    set_current_block(cond_true);
    CHECK_BAILOUT(Visit(expr->then_expression()));
    cond_true = current_block();
  } else {
    cond_true = NULL;
  }

  if (cond_false->HasPredecessor()) {
    cond_false->SetJoinId(expr->ElseId());
    set_current_block(cond_false);
    CHECK_BAILOUT(Visit(expr->else_expression()));
    cond_false = current_block();
  } else {
    cond_false = NULL;
  }

  // In a test context the arms have already branched to the context's
  // target blocks; otherwise join them, and in a value context return the
  // value the live arm pushed.
  if (!ast_context()->IsTest()) {
    HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
    set_current_block(join);
    if (join != NULL && !ast_context()->IsEffect()) {
      return ast_context()->ReturnValue(Pop());
    }
  }
}
   5182 
   5183 bool HOptimizedGraphBuilder::CanInlineGlobalPropertyAccess(
   5184     Variable* var, LookupIterator* it, PropertyAccessType access_type) {
   5185   if (var->is_this()) return false;
   5186   return CanInlineGlobalPropertyAccess(it, access_type);
   5187 }
   5188 
// Returns whether a global property access described by |it| can be
// inlined as a direct PropertyCell access instead of going through an IC.
bool HOptimizedGraphBuilder::CanInlineGlobalPropertyAccess(
    LookupIterator* it, PropertyAccessType access_type) {
  if (!current_info()->has_global_object()) {
    return false;
  }

  switch (it->state()) {
    case LookupIterator::ACCESSOR:
    case LookupIterator::ACCESS_CHECK:
    case LookupIterator::INTERCEPTOR:
    case LookupIterator::INTEGER_INDEXED_EXOTIC:
    case LookupIterator::NOT_FOUND:
      return false;
    case LookupIterator::DATA:
      // Stores to read-only data properties are not inlined.
      if (access_type == STORE && it->IsReadOnly()) return false;
      // Only properties held directly by the global object qualify.
      if (!it->GetHolder<JSObject>()->IsJSGlobalObject()) return false;
      return true;
    case LookupIterator::JSPROXY:
    case LookupIterator::TRANSITION:
      // These states cannot occur for an OWN lookup on the global object.
      UNREACHABLE();
  }
  UNREACHABLE();
  return false;
}
   5213 
   5214 
   5215 HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
   5216   DCHECK(var->IsContextSlot());
   5217   HValue* context = environment()->context();
   5218   int length = scope()->ContextChainLength(var->scope());
   5219   while (length-- > 0) {
   5220     context = Add<HLoadNamedField>(
   5221         context, nullptr,
   5222         HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
   5223   }
   5224   return context;
   5225 }
   5226 
// Emits an inlined load of a global property from its PropertyCell. A code
// dependency on the cell is registered so optimized code deoptimizes if the
// cell's state changes.
void HOptimizedGraphBuilder::InlineGlobalPropertyLoad(LookupIterator* it,
                                                      BailoutId ast_id) {
  Handle<PropertyCell> cell = it->GetPropertyCell();
  top_info()->dependencies()->AssumePropertyCell(cell);
  auto cell_type = it->property_details().cell_type();
  if (cell_type == PropertyCellType::kConstant ||
      cell_type == PropertyCellType::kUndefined) {
    // The cell's value is known constant: embed it directly.
    Handle<Object> constant_object(cell->value(), isolate());
    if (constant_object->IsConsString()) {
      // Flatten cons strings before embedding them as constants.
      constant_object = String::Flatten(Handle<String>::cast(constant_object));
    }
    HConstant* constant = New<HConstant>(constant_object);
    return ast_context()->ReturnInstruction(constant, ast_id);
  } else {
    // Mutable cell: emit a load of the cell's value field, using the
    // cell's constant-type info to pick a tighter representation.
    auto access = HObjectAccess::ForPropertyCellValue();
    UniqueSet<Map>* field_maps = nullptr;
    if (cell_type == PropertyCellType::kConstantType) {
      switch (cell->GetConstantType()) {
        case PropertyCellConstantType::kSmi:
          access = access.WithRepresentation(Representation::Smi());
          break;
        case PropertyCellConstantType::kStableMap: {
          // Check that the map really is stable. The heap object could
          // have mutated without the cell updating state. In that case,
          // make no promises about the loaded value except that it's a
          // heap object.
          access = access.WithRepresentation(Representation::HeapObject());
          Handle<Map> map(HeapObject::cast(cell->value())->map());
          if (map->is_stable()) {
            field_maps = new (zone())
                UniqueSet<Map>(Unique<Map>::CreateImmovable(map), zone());
          }
          break;
        }
      }
    }
    HConstant* cell_constant = Add<HConstant>(cell);
    HLoadNamedField* instr;
    if (field_maps == nullptr) {
      instr = New<HLoadNamedField>(cell_constant, nullptr, access);
    } else {
      instr = New<HLoadNamedField>(cell_constant, nullptr, access, field_maps,
                                   HType::HeapObject());
    }
    // This load aliases global variable stores, not in-object field stores;
    // adjust the GVN dependency flags accordingly.
    instr->ClearDependsOnFlag(kInobjectFields);
    instr->SetDependsOnFlag(kGlobalVars);
    return ast_context()->ReturnInstruction(instr, ast_id);
  }
}
   5276 
// Compiles a variable reference. Dispatches on where the variable lives:
// unallocated (global), parameter/local, context slot, or dynamic lookup.
void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Variable* variable = expr->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      if (IsLexicalVariableMode(variable->mode())) {
        // TODO(rossberg): should this be an DCHECK?
        return Bailout(kReferenceToGlobalLexicalVariable);
      }
      // Handle known global constants like 'undefined' specially to avoid a
      // load from a global cell for them.
      Handle<Object> constant_value =
          isolate()->factory()->GlobalConstantFor(variable->name());
      if (!constant_value.is_null()) {
        HConstant* instr = New<HConstant>(constant_value);
        return ast_context()->ReturnInstruction(instr, expr->id());
      }

      Handle<JSGlobalObject> global(current_info()->global_object());

      // Lookup in script contexts.
      {
        Handle<ScriptContextTable> script_contexts(
            global->native_context()->script_context_table());
        ScriptContextTable::LookupResult lookup;
        if (ScriptContextTable::Lookup(script_contexts, variable->name(),
                                       &lookup)) {
          Handle<Context> script_context = ScriptContextTable::GetContext(
              script_contexts, lookup.context_index);
          Handle<Object> current_value =
              FixedArray::get(*script_context, lookup.slot_index, isolate());

          // If the values is not the hole, it will stay initialized,
          // so no need to generate a check.
          if (current_value->IsTheHole(isolate())) {
            return Bailout(kReferenceToUninitializedVariable);
          }
          HInstruction* result = New<HLoadNamedField>(
              Add<HConstant>(script_context), nullptr,
              HObjectAccess::ForContextSlot(lookup.slot_index));
          return ast_context()->ReturnInstruction(result, expr->id());
        }
      }

      // Prefer an inlined property-cell load; otherwise fall back to the
      // LoadGlobalIC stub call with the feedback vector slot.
      LookupIterator it(global, variable->name(), LookupIterator::OWN);
      it.TryLookupCachedProperty();
      if (CanInlineGlobalPropertyAccess(variable, &it, LOAD)) {
        InlineGlobalPropertyLoad(&it, expr->id());
        return;
      } else {
        Handle<TypeFeedbackVector> vector(current_feedback_vector(), isolate());

        HValue* vector_value = Add<HConstant>(vector);
        HValue* slot_value =
            Add<HConstant>(vector->GetIndex(expr->VariableFeedbackSlot()));
        Callable callable = CodeFactory::LoadGlobalICInOptimizedCode(
            isolate(), ast_context()->typeof_mode());
        HValue* stub = Add<HConstant>(callable.code());
        HValue* values[] = {slot_value, vector_value};
        HCallWithDescriptor* instr = New<HCallWithDescriptor>(
            Code::LOAD_GLOBAL_IC, stub, 0, callable.descriptor(),
            ArrayVector(values));
        return ast_context()->ReturnInstruction(instr, expr->id());
      }
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      // The value lives in the (simulated) environment.
      HValue* value = LookupAndMakeLive(variable);
      if (value == graph()->GetConstantHole()) {
        // The hole marks a read before lexical initialization (TDZ).
        DCHECK(IsDeclaredVariableMode(variable->mode()) &&
               variable->mode() != VAR);
        return Bailout(kReferenceToUninitializedVariable);
      }
      return ast_context()->ReturnValue(value);
    }

    case VariableLocation::CONTEXT: {
      HValue* context = BuildContextChainWalk(variable);
      HLoadContextSlot::Mode mode;
      switch (variable->mode()) {
        case LET:
        case CONST:
          // Lexical slots may still hold the hole; deopt on that case.
          mode = HLoadContextSlot::kCheckDeoptimize;
          break;
        default:
          mode = HLoadContextSlot::kNoCheck;
          break;
      }
      HLoadContextSlot* instr =
          new(zone()) HLoadContextSlot(context, variable->index(), mode);
      return ast_context()->ReturnInstruction(instr, expr->id());
    }

    case VariableLocation::LOOKUP:
      return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}
   5380 
   5381 
   5382 void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
   5383   DCHECK(!HasStackOverflow());
   5384   DCHECK(current_block() != NULL);
   5385   DCHECK(current_block()->HasPredecessor());
   5386   HConstant* instr = New<HConstant>(expr->value());
   5387   return ast_context()->ReturnInstruction(instr, expr->id());
   5388 }
   5389 
   5390 
// Compiles a regexp literal as a call to the FastCloneRegExp stub, passing
// the closure, literal index, pattern, and flags as stub arguments.
void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Callable callable = CodeFactory::FastCloneRegExp(isolate());
  HValue* values[] = {AddThisFunction(), Add<HConstant>(expr->literal_index()),
                      Add<HConstant>(expr->pattern()),
                      Add<HConstant>(expr->flags())};
  HConstant* stub_value = Add<HConstant>(callable.code());
  HInstruction* instr = New<HCallWithDescriptor>(
      stub_value, 0, callable.descriptor(), ArrayVector(values));
  return ast_context()->ReturnInstruction(instr, expr->id());
}
   5404 
   5405 
   5406 static bool CanInlinePropertyAccess(Handle<Map> map) {
   5407   if (map->instance_type() == HEAP_NUMBER_TYPE) return true;
   5408   if (map->instance_type() < FIRST_NONSTRING_TYPE) return true;
   5409   return map->IsJSObjectMap() && !map->is_dictionary_map() &&
   5410          !map->has_named_interceptor() &&
   5411          // TODO(verwaest): Whitelist contexts to which we have access.
   5412          !map->is_access_check_needed();
   5413 }
   5414 
   5415 
// Determines whether the given array or object literal boilerplate satisfies
// all limits to be considered for fast deep-copying and computes the total
// size of all objects that are part of the graph.
// |max_depth| bounds recursion; |*max_properties| is a shared budget that
// is decremented for every element/property seen across the whole graph.
static bool IsFastLiteral(Handle<JSObject> boilerplate,
                          int max_depth,
                          int* max_properties) {
  // Try to bring a deprecated boilerplate up to date first; if migration
  // fails, the fast path is not applicable.
  if (boilerplate->map()->is_deprecated() &&
      !JSObject::TryMigrateInstance(boilerplate)) {
    return false;
  }

  DCHECK(max_depth >= 0 && *max_properties >= 0);
  if (max_depth == 0) return false;

  Isolate* isolate = boilerplate->GetIsolate();
  Handle<FixedArrayBase> elements(boilerplate->elements());
  // COW element arrays are shared, not copied, so they cost nothing here.
  if (elements->length() > 0 &&
      elements->map() != isolate->heap()->fixed_cow_array_map()) {
    if (boilerplate->HasFastSmiOrObjectElements()) {
      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
      int length = elements->length();
      for (int i = 0; i < length; i++) {
        // Every element consumes one unit of the property budget.
        if ((*max_properties)-- == 0) return false;
        Handle<Object> value(fast_elements->get(i), isolate);
        if (value->IsJSObject()) {
          // Recurse into nested object values with reduced depth.
          Handle<JSObject> value_object = Handle<JSObject>::cast(value);
          if (!IsFastLiteral(value_object,
                             max_depth - 1,
                             max_properties)) {
            return false;
          }
        }
      }
    } else if (!boilerplate->HasFastDoubleElements()) {
      // Non-fast elements kinds (e.g. dictionary) disqualify the literal.
      return false;
    }
  }

  // Only boilerplates with purely in-object properties qualify; a non-empty
  // out-of-object properties backing store disqualifies them.
  Handle<FixedArray> properties(boilerplate->properties());
  if (properties->length() > 0) {
    return false;
  } else {
    Handle<DescriptorArray> descriptors(
        boilerplate->map()->instance_descriptors());
    int limit = boilerplate->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < limit; i++) {
      PropertyDetails details = descriptors->GetDetails(i);
      if (details.type() != DATA) continue;
      if ((*max_properties)-- == 0) return false;
      FieldIndex field_index = FieldIndex::ForDescriptor(boilerplate->map(), i);
      // Unboxed double fields hold no heap objects; nothing to recurse into.
      if (boilerplate->IsUnboxedDoubleField(field_index)) continue;
      Handle<Object> value(boilerplate->RawFastPropertyAt(field_index),
                           isolate);
      if (value->IsJSObject()) {
        Handle<JSObject> value_object = Handle<JSObject>::cast(value);
        if (!IsFastLiteral(value_object,
                           max_depth - 1,
                           max_properties)) {
          return false;
        }
      }
    }
  }
  return true;
}
   5481 
   5482 
// Compiles an object literal. The boilerplate is either deep-copied inline
// (fast path) or created via the CreateObjectLiteral runtime call; then the
// non-compile-time property values are stored into the new object.
void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
  HInstruction* literal;

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  Handle<Object> literals_cell(
      closure->literals()->literal(expr->literal_index()), isolate());
  Handle<AllocationSite> site;
  Handle<JSObject> boilerplate;
  if (!literals_cell->IsUndefined(isolate())) {
    // Retrieve the boilerplate
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
                                   isolate());
  }

  if (!boilerplate.is_null() &&
      IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
    // Fast path: emit an inline deep copy of the boilerplate.
    AllocationSiteUsageContext site_context(isolate(), site, false);
    site_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate, &site_context);
    site_context.ExitScope(site, boilerplate);
  } else {
    // Slow path: call the runtime to create the literal object.
    NoObservableSideEffectsScope no_effects(this);
    Handle<FixedArray> constant_properties = expr->constant_properties();
    int literal_index = expr->literal_index();
    int flags = expr->ComputeFlags(true);

    Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index),
                        Add<HConstant>(constant_properties),
                        Add<HConstant>(flags));

    Runtime::FunctionId function_id = Runtime::kCreateObjectLiteral;
    literal = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 4);
  }

  // The object is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->is_computed_name()) return Bailout(kComputedPropertyName);
    // Compile-time values are already part of the boilerplate.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();

    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->IsStringLiteral()) {
          DCHECK(key->IsPropertyName());
          if (property->emit_store()) {
            CHECK_ALIVE(VisitForValue(value));
            HValue* value = Pop();

            Handle<Map> map = property->GetReceiverType();
            Handle<String> name = key->AsPropertyName();
            HValue* store;
            FeedbackVectorSlot slot = property->GetSlot();
            if (map.is_null()) {
              // If we don't know the monomorphic type, do a generic store.
              CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot, literal,
                                                    name, value));
            } else {
              PropertyAccessInfo info(this, STORE, map, name);
              if (info.CanAccessMonomorphic()) {
                // Monomorphic store: check the map, then store directly.
                HValue* checked_literal = Add<HCheckMaps>(literal, map);
                DCHECK(!info.IsAccessorConstant());
                store = BuildMonomorphicAccess(
                    &info, literal, checked_literal, value,
                    BailoutId::None(), BailoutId::None());
              } else {
                CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot,
                                                      literal, name, value));
              }
            }
            if (store->IsInstruction()) {
              AddInstruction(HInstruction::cast(store));
            }
            DCHECK(store->HasObservableSideEffects());
            Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);

            // Add [[HomeObject]] to function literals.
            if (FunctionLiteral::NeedsHomeObject(property->value())) {
              Handle<Symbol> sym = isolate()->factory()->home_object_symbol();
              HInstruction* store_home = BuildNamedGeneric(
                  STORE, NULL, property->GetSlot(1), value, sym, literal);
              AddInstruction(store_home);
              DCHECK(store_home->HasObservableSideEffects());
              Add<HSimulate>(property->value()->id(), REMOVABLE_SIMULATE);
            }
          } else {
            // Duplicate key whose store is elided; still evaluate for effect.
            CHECK_ALIVE(VisitForEffect(value));
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
      case ObjectLiteral::Property::SETTER:
      case ObjectLiteral::Property::GETTER:
        return Bailout(kObjectLiteralWithComplexProperty);
      default: UNREACHABLE();
    }
  }

  return ast_context()->ReturnValue(Pop());
}
   5600 
   5601 
// Compiles an array literal. The boilerplate is either deep-copied inline
// (fast path) or created via the CreateArrayLiteral runtime call; then the
// non-compile-time element values are stored into the new array.
void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  HInstruction* literal;

  Handle<AllocationSite> site;
  Handle<LiteralsArray> literals(environment()->closure()->literals(),
                                 isolate());
  Handle<Object> literals_cell(literals->literal(expr->literal_index()),
                               isolate());
  Handle<JSObject> boilerplate_object;
  if (!literals_cell->IsUndefined(isolate())) {
    DCHECK(literals_cell->IsAllocationSite());
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate_object = Handle<JSObject>(
        JSObject::cast(site->transition_info()), isolate());
  }

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  if (!boilerplate_object.is_null() &&
      IsFastLiteral(boilerplate_object, kMaxFastLiteralDepth,
                    &max_properties)) {
    DCHECK(site->SitePointsToLiteral());
    // Fast path: emit an inline deep copy of the boilerplate.
    AllocationSiteUsageContext site_context(isolate(), site, false);
    site_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate_object, &site_context);
    site_context.ExitScope(site, boilerplate_object);
  } else {
    // Slow path: call the runtime to create the literal array.
    NoObservableSideEffectsScope no_effects(this);
    Handle<FixedArray> constants = expr->constant_elements();
    int literal_index = expr->literal_index();
    int flags = expr->ComputeFlags(true);

    Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index),
                        Add<HConstant>(constants), Add<HConstant>(flags));

    Runtime::FunctionId function_id = Runtime::kCreateArrayLiteral;
    literal = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 4);

    // Register to deopt if the boilerplate ElementsKind changes.
    if (!site.is_null()) {
      top_info()->dependencies()->AssumeTransitionStable(site);
    }
  }

  // The array is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);

  HInstruction* elements = NULL;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    CHECK_ALIVE(VisitForValue(subexpr));
    HValue* value = Pop();
    if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);

    // Reload the elements backing store; earlier stores may have caused
    // the array to be re-normalized.
    elements = AddLoadElements(literal);

    HValue* key = Add<HConstant>(i);

    if (!boilerplate_object.is_null()) {
      // With a known boilerplate, store directly in its elements kind.
      ElementsKind boilerplate_elements_kind =
          boilerplate_object->GetElementsKind();
      switch (boilerplate_elements_kind) {
        case FAST_SMI_ELEMENTS:
        case FAST_HOLEY_SMI_ELEMENTS:
        case FAST_ELEMENTS:
        case FAST_HOLEY_ELEMENTS:
        case FAST_DOUBLE_ELEMENTS:
        case FAST_HOLEY_DOUBLE_ELEMENTS: {
          Add<HStoreKeyed>(elements, key, value, nullptr,
                           boilerplate_elements_kind);
          break;
        }
        default:
          UNREACHABLE();
          break;
      }
    } else {
      // No boilerplate: fall back to a generic keyed store.
      HInstruction* instr = BuildKeyedGeneric(
          STORE, expr, expr->LiteralFeedbackSlot(), literal, key, value);
      AddInstruction(instr);
    }

    Add<HSimulate>(expr->GetIdForElement(i));
  }

  return ast_context()->ReturnValue(Pop());
}
   5702 
   5703 
// Adds a heap-object check followed by a map check on |object| and returns
// the emitted HCheckMaps instruction.
HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
                                                Handle<Map> map) {
  BuildCheckHeapObject(object);
  return Add<HCheckMaps>(object, map);
}
   5709 
   5710 
// Builds a load of the named data field described by |info| from
// |checked_object|. May constant-fold read-only, non-configurable
// properties of constant receivers.
HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object) {
  // See if this is a load for an immutable property
  if (checked_object->ActualValue()->IsConstant()) {
    Handle<Object> object(
        HConstant::cast(checked_object->ActualValue())->handle(isolate()));

    if (object->IsJSObject()) {
      LookupIterator it(object, info->name(),
                        LookupIterator::OWN_SKIP_INTERCEPTOR);
      Handle<Object> value = JSReceiver::GetDataProperty(&it);
      // Read-only + non-configurable means the value can never change, so
      // it is safe to embed it as a constant.
      if (it.IsFound() && it.IsReadOnly() && !it.IsConfigurable()) {
        return New<HConstant>(value);
      }
    }
  }

  HObjectAccess access = info->access();
  if (access.representation().IsDouble() &&
      (!FLAG_unbox_double_fields || !access.IsInobject())) {
    // Load the heap number.
    checked_object = Add<HLoadNamedField>(
        checked_object, nullptr,
        access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }

  // Attach the known field maps (if any) to the load so later passes can
  // use the stronger type information.
  SmallMapList* map_list = info->field_maps();
  if (map_list->length() == 0) {
    return New<HLoadNamedField>(checked_object, checked_object, access);
  }

  UniqueSet<Map>* maps = new(zone()) UniqueSet<Map>(map_list->length(), zone());
  for (int i = 0; i < map_list->length(); ++i) {
    maps->Add(Unique<Map>::CreateImmovable(map_list->at(i)), zone());
  }
  return New<HLoadNamedField>(
      checked_object, checked_object, access, maps, info->field_type());
}
   5752 
   5753 
// Builds a store of |value| into the named data field described by |info|
// on |checked_object|. Handles boxed-double fields and map transitions.
HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object,
    HValue* value) {
  bool transition_to_field = info->IsTransition();
  // TODO(verwaest): Move this logic into PropertyAccessInfo.
  HObjectAccess field_access = info->access();

  HStoreNamedField *instr;
  if (field_access.representation().IsDouble() &&
      (!FLAG_unbox_double_fields || !field_access.IsInobject())) {
    // Double fields that are not unboxed are stored in a HeapNumber box.
    HObjectAccess heap_number_access =
        field_access.WithRepresentation(Representation::Tagged());
    if (transition_to_field) {
      // The store requires a mutable HeapNumber to be allocated.
      NoObservableSideEffectsScope no_side_effects(this);
      HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

      // TODO(hpayer): Allocation site pretenuring support.
      HInstruction* heap_number =
          Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
                         MUTABLE_HEAP_NUMBER_TYPE, graph()->GetConstant0());
      AddStoreMapConstant(
          heap_number, isolate()->factory()->mutable_heap_number_map());
      Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                            value);
      // Store the freshly allocated box into the field itself.
      instr = New<HStoreNamedField>(checked_object->ActualValue(),
                                    heap_number_access,
                                    heap_number);
    } else {
      // Already holds a HeapNumber; load the box and write its value field.
      HInstruction* heap_number =
          Add<HLoadNamedField>(checked_object, nullptr, heap_number_access);
      instr = New<HStoreNamedField>(heap_number,
                                    HObjectAccess::ForHeapNumberValue(),
                                    value, STORE_TO_INITIALIZED_ENTRY);
    }
  } else {
    if (field_access.representation().IsHeapObject()) {
      BuildCheckHeapObject(value);
    }

    if (!info->field_maps()->is_empty()) {
      DCHECK(field_access.representation().IsHeapObject());
      // Enforce the field's known map set on the stored value.
      value = Add<HCheckMaps>(value, info->field_maps());
    }

    // This is a normal store.
    instr = New<HStoreNamedField>(
        checked_object->ActualValue(), field_access, value,
        transition_to_field ? INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY);
  }

  if (transition_to_field) {
    // Attach the target map so the store also performs the map transition.
    Handle<Map> transition(info->transition());
    DCHECK(!transition->is_deprecated());
    instr->SetTransition(Add<HConstant>(transition));
  }
  return instr;
}
   5814 
// Returns the field type recorded in |map|'s instance descriptors for the
// descriptor index found by the earlier lookup (number_).
Handle<FieldType>
HOptimizedGraphBuilder::PropertyAccessInfo::GetFieldTypeFromMap(
    Handle<Map> map) const {
  DCHECK(IsFound());
  DCHECK(number_ < map->NumberOfOwnDescriptors());
  return handle(map->instance_descriptors()->GetFieldType(number_), isolate());
}
   5822 
// Returns whether |info| describes an access that can be handled by the
// same (monomorphic) code as this one. On success this may widen |info|'s
// representation, field maps, and field type to cover both accesses.
bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
    PropertyAccessInfo* info) {
  if (!CanInlinePropertyAccess(map_)) return false;

  // Currently only handle AstType::Number as a polymorphic case.
  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
  // instruction.
  if (IsNumberType()) return false;

  // Values are only compatible for monomorphic load if they all behave the same
  // regarding value wrappers.
  if (IsValueWrapped() != info->IsValueWrapped()) return false;

  if (!LookupDescriptor()) return false;

  if (!IsFound()) {
    // Neither map owns the property; compatible only if both walk the same
    // prototype chain.
    return (!info->IsFound() || info->has_holder()) &&
           map()->prototype() == info->map()->prototype();
  }

  // Mismatch if the other access info found the property in the prototype
  // chain.
  if (info->has_holder()) return false;

  if (IsAccessorConstant()) {
    return accessor_.is_identical_to(info->accessor_) &&
        api_holder_.is_identical_to(info->api_holder_);
  }

  if (IsDataConstant()) {
    return constant_.is_identical_to(info->constant_);
  }

  DCHECK(IsData());
  if (!info->IsData()) return false;

  // Field representations must be mutually compatible for the access kind.
  Representation r = access_.representation();
  if (IsLoad()) {
    if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
  } else {
    if (!info->access_.representation().IsCompatibleForStore(r)) return false;
  }
  if (info->access_.offset() != access_.offset()) return false;
  if (info->access_.IsInobject() != access_.IsInobject()) return false;
  if (IsLoad()) {
    // For loads, the merged field-map set is the union; an empty set on
    // either side means "no map promise at all".
    if (field_maps_.is_empty()) {
      info->field_maps_.Clear();
    } else if (!info->field_maps_.is_empty()) {
      for (int i = 0; i < field_maps_.length(); ++i) {
        info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone());
      }
      info->field_maps_.Sort();
    }
  } else {
    // We can only merge stores that agree on their field maps. The comparison
    // below is safe, since we keep the field maps sorted.
    if (field_maps_.length() != info->field_maps_.length()) return false;
    for (int i = 0; i < field_maps_.length(); ++i) {
      if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) {
        return false;
      }
    }
  }
  info->GeneralizeRepresentation(r);
  info->field_type_ = info->field_type_.Combine(field_type_);
  return true;
}
   5890 
   5891 
   5892 bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
   5893   if (!map_->IsJSObjectMap()) return true;
   5894   LookupDescriptor(*map_, *name_);
   5895   return LoadResult(map_);
   5896 }
   5897 
   5898 
// Fills in the access details for the property found in |map|: the field
// access for data properties, the accessor (plus API holder) for accessor
// constants, or the constant value for data constants.  Returns false when
// the access cannot be inlined (store to a read-only property, or an
// accessor that is neither a JSFunction nor a function template).
bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
  if (!IsLoad() && IsProperty() && IsReadOnly()) {
    return false;
  }

  if (IsData()) {
    // Construct the object field access.
    int index = GetLocalFieldIndexFromMap(map);
    access_ = HObjectAccess::ForField(map, index, representation(), name_);

    // Load field map for heap objects.
    return LoadFieldMaps(map);
  } else if (IsAccessorConstant()) {
    Handle<Object> accessors = GetAccessorsFromMap(map);
    if (!accessors->IsAccessorPair()) return false;
    // Pick the getter for loads and the setter for stores.
    Object* raw_accessor =
        IsLoad() ? Handle<AccessorPair>::cast(accessors)->getter()
                 : Handle<AccessorPair>::cast(accessors)->setter();
    if (!raw_accessor->IsJSFunction() &&
        !raw_accessor->IsFunctionTemplateInfo())
      return false;
    Handle<Object> accessor = handle(HeapObject::cast(raw_accessor));
    CallOptimization call_optimization(accessor);
    if (call_optimization.is_simple_api_call()) {
      // Remember the expected API holder so the call can later be
      // dispatched on it.
      CallOptimization::HolderLookup holder_lookup;
      api_holder_ =
          call_optimization.LookupHolderOfExpectedType(map_, &holder_lookup);
    }
    accessor_ = accessor;
  } else if (IsDataConstant()) {
    constant_ = GetConstantFromMap(map);
  }

  return true;
}
   5934 
   5935 
// Derives the set of stable maps (and the HType) that the field described
// by |map|'s descriptor can hold, from the field type tracked in the
// descriptor array.  Registers a dependency on the field's owner map so
// the code deopts if the field type generalizes.  Returns false only for
// stores whose field map was cleared.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps(
    Handle<Map> map) {
  // Clear any previously collected field maps/type.
  field_maps_.Clear();
  field_type_ = HType::Tagged();

  // Figure out the field type from the accessor map.
  Handle<FieldType> field_type = GetFieldTypeFromMap(map);

  // Collect the (stable) maps from the field type.
  if (field_type->IsClass()) {
    DCHECK(access_.representation().IsHeapObject());
    Handle<Map> field_map = field_type->AsClass();
    // Only stable maps are worth tracking; unstable ones may change at any
    // time without notifying dependent code.
    if (field_map->is_stable()) {
      field_maps_.Add(field_map, zone());
    }
  }

  if (field_maps_.is_empty()) {
    // Store is not safe if the field map was cleared.
    return IsLoad() || !field_type->IsNone();
  }

  // Determine field HType from field type.
  field_type_ = HType::FromFieldType(field_type, zone());
  DCHECK(field_type_.IsHeapObject());

  // Add dependency on the map that introduced the field.
  top_info()->dependencies()->AssumeFieldOwner(GetFieldOwnerFromMap(map));
  return true;
}
   5967 
   5968 
// Walks the receiver map's prototype chain looking for |name_|.  Returns
// true if the walk terminates safely: either the property is found on an
// inlinable holder (then LoadResult records it), or the chain ends without
// the property.  Returns false when a prototype cannot be inlined.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
  Handle<Map> map = this->map();
  if (name_->IsPrivate()) {
    // Private names are not looked up on the prototype chain; only a
    // hidden prototype could still interfere with the access.
    NotFound();
    return !map->has_hidden_prototype();
  }

  while (map->prototype()->IsJSObject()) {
    holder_ = handle(JSObject::cast(map->prototype()));
    if (holder_->map()->is_deprecated()) {
      // Bring the holder up to date before inspecting its map.
      JSObject::TryMigrateInstance(holder_);
    }
    map = Handle<Map>(holder_->map());
    if (!CanInlinePropertyAccess(map)) {
      NotFound();
      return false;
    }
    LookupDescriptor(*map, *name_);
    if (IsFound()) return LoadResult(map);
  }

  NotFound();
  // A trailing non-JSObject JSReceiver (e.g. a proxy) could still intercept
  // the access, so only a plain end of chain is safe.
  return !map->prototype()->IsJSReceiver();
}
   5993 
   5994 
   5995 bool HOptimizedGraphBuilder::PropertyAccessInfo::IsIntegerIndexedExotic() {
   5996   InstanceType instance_type = map_->instance_type();
   5997   return instance_type == JS_TYPED_ARRAY_TYPE && name_->IsString() &&
   5998          IsSpecialIndex(isolate()->unicode_cache(), String::cast(*name_));
   5999 }
   6000 
   6001 
// Decides whether this single-map property access can be compiled inline:
// a special-cased JSObject/JSFunction field, a found own descriptor, a hit
// on the prototype chain, or (for stores) an existing map transition.
bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
  if (!CanInlinePropertyAccess(map_)) return false;
  // Fixed JSObject fields (e.g. lengths) can only be loaded inline.
  if (IsJSObjectFieldAccessor()) return IsLoad();
  // Loading "prototype" from a constructor function is special-cased.
  if (map_->IsJSFunctionMap() && map_->is_constructor() &&
      !map_->has_non_instance_prototype() &&
      name_.is_identical_to(isolate()->factory()->prototype_string())) {
    return IsLoad();
  }
  if (!LookupDescriptor()) return false;
  // Own property: loads always work, stores only on writable properties.
  if (IsFound()) return IsLoad() || !IsReadOnly();
  if (IsIntegerIndexedExotic()) return false;
  if (!LookupInPrototypes()) return false;
  if (IsLoad()) return true;

  if (IsAccessorConstant()) return true;
  // A store of a not-yet-present property may be handled through an
  // existing map transition, provided the object has spare fields.
  LookupTransition(*map_, *name_, NONE);
  if (IsTransitionToData() && map_->unused_property_fields() > 0) {
    // Construct the object field access.
    int descriptor = transition()->LastAdded();
    int index =
        transition()->instance_descriptors()->GetFieldIndex(descriptor) -
        map_->GetInObjectProperties();
    PropertyDetails details =
        transition()->instance_descriptors()->GetDetails(descriptor);
    Representation representation = details.representation();
    access_ = HObjectAccess::ForField(map_, index, representation, name_);

    // Load field map for heap objects.
    return LoadFieldMaps(transition());
  }
  return false;
}
   6034 
   6035 
// Returns true if every map in |maps| can be served by the single access
// this info describes, i.e. the polymorphic access collapses into one
// monomorphic case.  |maps->first()| must be this info's map.
bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
    SmallMapList* maps) {
  DCHECK(map_.is_identical_to(maps->first()));
  if (!CanAccessMonomorphic()) return false;
  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
  if (maps->length() > kMaxLoadPolymorphism) return false;
  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
  if (GetJSObjectFieldAccess(&access)) {
    // Special JSObject field: every map must yield the exact same access.
    for (int i = 1; i < maps->length(); ++i) {
      PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
      HObjectAccess test_access = HObjectAccess::ForMap();  // bogus default
      if (!test_info.GetJSObjectFieldAccess(&test_access)) return false;
      if (!access.Equals(test_access)) return false;
    }
    return true;
  }

  // Currently only handle numbers as a polymorphic case.
  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
  // instruction.
  if (IsNumberType()) return false;

  // Multiple maps cannot transition to the same target map.
  DCHECK(!IsLoad() || !IsTransition());
  if (IsTransition() && maps->length() > 1) return false;

  // Every remaining map must produce an access compatible with this one.
  for (int i = 1; i < maps->length(); ++i) {
    PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
    if (!test_info.IsCompatible(this)) return false;
  }

  return true;
}
   6069 
   6070 
   6071 Handle<Map> HOptimizedGraphBuilder::PropertyAccessInfo::map() {
   6072   Handle<JSFunction> ctor;
   6073   if (Map::GetConstructorFunction(
   6074           map_, handle(current_info()->closure()->context()->native_context()))
   6075           .ToHandle(&ctor)) {
   6076     return handle(ctor->initial_map());
   6077   }
   6078   return map_;
   6079 }
   6080 
   6081 
   6082 static bool NeedsWrapping(Handle<Map> map, Handle<JSFunction> target) {
   6083   return !map->IsJSObjectMap() &&
   6084          is_sloppy(target->shared()->language_mode()) &&
   6085          !target->shared()->native();
   6086 }
   6087 
   6088 
// Returns true if calling |target| on this access's receiver map requires
// wrapping the receiver; see NeedsWrapping above.
bool HOptimizedGraphBuilder::PropertyAccessInfo::NeedsWrappingFor(
    Handle<JSFunction> target) const {
  return NeedsWrapping(map_, target);
}
   6093 
   6094 
// Emits graph nodes for a single monomorphic named property access described
// by |info|.  |checked_object| is the receiver after its map check; |value|
// is only meaningful for stores.  Returns the resulting HValue; returns NULL
// when an accessor was inlined instead (or the inliner hit a stack
// overflow), and nullptr after a bailout.
HValue* HOptimizedGraphBuilder::BuildMonomorphicAccess(
    PropertyAccessInfo* info, HValue* object, HValue* checked_object,
    HValue* value, BailoutId ast_id, BailoutId return_id,
    bool can_inline_accessor) {
  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
  if (info->GetJSObjectFieldAccess(&access)) {
    // Special JSObject field: always a plain load.
    DCHECK(info->IsLoad());
    return New<HLoadNamedField>(object, checked_object, access);
  }

  // Loading "prototype" from a constructor function has its own instruction.
  if (info->name().is_identical_to(isolate()->factory()->prototype_string()) &&
      info->map()->IsJSFunctionMap() && info->map()->is_constructor()) {
    DCHECK(!info->map()->has_non_instance_prototype());
    return New<HLoadFunctionPrototype>(checked_object);
  }

  HValue* checked_holder = checked_object;
  if (info->has_holder()) {
    // Property lives on the prototype chain: verify the chain up to the
    // holder is unchanged.
    Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
    checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
  }

  if (!info->IsFound()) {
    // The property is absent along the whole checked chain.
    DCHECK(info->IsLoad());
    return graph()->GetConstantUndefined();
  }

  if (info->IsData()) {
    if (info->IsLoad()) {
      return BuildLoadNamedField(info, checked_holder);
    } else {
      return BuildStoreNamedField(info, checked_object, value);
    }
  }

  if (info->IsTransition()) {
    DCHECK(!info->IsLoad());
    return BuildStoreNamedField(info, checked_object, value);
  }

  if (info->IsAccessorConstant()) {
    // An API getter with a cached-property name can be replaced by a direct
    // load of that cached property, avoiding the call entirely.
    MaybeHandle<Name> maybe_name =
        FunctionTemplateInfo::TryGetCachedPropertyName(isolate(),
                                                       info->accessor());
    if (!maybe_name.is_null()) {
      Handle<Name> name = maybe_name.ToHandleChecked();
      PropertyAccessInfo cache_info(this, LOAD, info->map(), name);
      // Load new target.
      if (cache_info.CanAccessMonomorphic()) {
        return BuildLoadNamedField(&cache_info, checked_object);
      }
    }

    // Push receiver (and, for setters, the value) as call arguments.
    Push(checked_object);
    int argument_count = 1;
    if (!info->IsLoad()) {
      argument_count = 2;
      Push(value);
    }

    if (info->accessor()->IsJSFunction() &&
        info->NeedsWrappingFor(Handle<JSFunction>::cast(info->accessor()))) {
      // Primitive receiver needing a wrapper: go through the generic call
      // machinery so the receiver gets wrapped.
      HValue* function = Add<HConstant>(info->accessor());
      PushArgumentsFromEnvironment(argument_count);
      return NewCallFunction(function, argument_count, TailCallMode::kDisallow,
                             ConvertReceiverMode::kNotNullOrUndefined,
                             TailCallMode::kDisallow);
    } else if (FLAG_inline_accessors && can_inline_accessor) {
      bool success = info->IsLoad()
          ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
          : TryInlineSetter(
              info->accessor(), info->map(), ast_id, return_id, value);
      // NULL tells the caller the access was inlined (or overflowed).
      if (success || HasStackOverflow()) return NULL;
    }

    PushArgumentsFromEnvironment(argument_count);
    if (!info->accessor()->IsJSFunction()) {
      Bailout(kInliningBailedOut);
      return nullptr;
    }
    return NewCallConstantFunction(Handle<JSFunction>::cast(info->accessor()),
                                   argument_count, TailCallMode::kDisallow,
                                   TailCallMode::kDisallow);
  }

  DCHECK(info->IsDataConstant());
  if (info->IsLoad()) {
    return New<HConstant>(info->constant());
  } else {
    // Store over a constant data property: just verify the stored value is
    // the very same object.
    return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant()));
  }
}
   6187 
   6188 
// Emits a polymorphic named load or store: a chain of map checks on
// |object|, one per receiver map supporting monomorphic access, each
// guarding inlined access code, all meeting in a shared join block.
// Unhandled maps fall through to a generic IC, or to a hard deopt when
// every known map was handled.
void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
    PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
    BailoutId ast_id, BailoutId return_id, HValue* object, HValue* value,
    SmallMapList* maps, Handle<Name> name) {
  // Something did not match; must use a polymorphic load.
  int count = 0;
  HBasicBlock* join = NULL;
  HBasicBlock* number_block = NULL;
  bool handled_string = false;

  // First pass: count handleable maps and detect a heap-number map, which
  // would require routing smis into the number case.
  bool handle_smi = false;
  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
  int i;
  for (i = 0; i < maps->length() && count < kMaxLoadPolymorphism; ++i) {
    PropertyAccessInfo info(this, access_type, maps->at(i), name);
    if (info.IsStringType()) {
      if (handled_string) continue;
      handled_string = true;
    }
    if (info.CanAccessMonomorphic()) {
      count++;
      if (info.IsNumberType()) {
        handle_smi = true;
        break;
      }
    }
  }

  if (i < maps->length()) {
    // The first pass stopped early (number map found, or polymorphism limit
    // reached): give up on inlining and use the generic path below.
    // NOTE(review): since the maps are cleared here, the handle_smi/
    // number_block machinery below appears unreachable — confirm.
    count = -1;
    maps->Clear();
  } else {
    count = 0;
  }
  HControlInstruction* smi_check = NULL;
  handled_string = false;

  // Second pass: emit the dispatch for each handleable map.
  for (i = 0; i < maps->length() && count < kMaxLoadPolymorphism; ++i) {
    PropertyAccessInfo info(this, access_type, maps->at(i), name);
    if (info.IsStringType()) {
      // All string maps share a single HIsStringAndBranch case.
      if (handled_string) continue;
      handled_string = true;
    }
    if (!info.CanAccessMonomorphic()) continue;

    if (count == 0) {
      // Lazily create the join block and smi routing on first use.
      join = graph()->CreateBasicBlock();
      if (handle_smi) {
        HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
        HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
        number_block = graph()->CreateBasicBlock();
        smi_check = New<HIsSmiAndBranch>(
            object, empty_smi_block, not_smi_block);
        FinishCurrentBlock(smi_check);
        GotoNoSimulate(empty_smi_block, number_block);
        set_current_block(not_smi_block);
      } else {
        BuildCheckHeapObject(object);
      }
    }
    ++count;
    HBasicBlock* if_true = graph()->CreateBasicBlock();
    HBasicBlock* if_false = graph()->CreateBasicBlock();
    HUnaryControlInstruction* compare;

    HValue* dependency;
    if (info.IsNumberType()) {
      Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
      compare = New<HCompareMap>(object, heap_number_map, if_true, if_false);
      dependency = smi_check;
    } else if (info.IsStringType()) {
      compare = New<HIsStringAndBranch>(object, if_true, if_false);
      dependency = compare;
    } else {
      compare = New<HCompareMap>(object, info.map(), if_true, if_false);
      dependency = compare;
    }
    FinishCurrentBlock(compare);

    if (info.IsNumberType()) {
      // Smis join the heap-number case here.
      GotoNoSimulate(if_true, number_block);
      if_true = number_block;
    }

    set_current_block(if_true);

    HValue* access =
        BuildMonomorphicAccess(&info, object, dependency, value, ast_id,
                               return_id, FLAG_polymorphic_inlining);

    HValue* result = NULL;
    switch (access_type) {
      case LOAD:
        result = access;
        break;
      case STORE:
        // A store's result is always the stored value.
        result = value;
        break;
    }

    if (access == NULL) {
      // NULL means the access was fully inlined (or the inliner overflowed
      // the stack); no instruction to add here.
      if (HasStackOverflow()) return;
    } else {
      if (access->IsInstruction()) {
        HInstruction* instr = HInstruction::cast(access);
        if (!instr->IsLinked()) AddInstruction(instr);
      }
      if (!ast_context()->IsEffect()) Push(result);
    }

    if (current_block() != NULL) Goto(join);
    set_current_block(if_false);
  }

  // Finish up.  Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen.  Otherwise
  // use a generic IC.
  if (count == maps->length() && FLAG_deoptimize_uncommon_cases) {
    FinishExitWithHardDeoptimization(
        DeoptimizeReason::kUnknownMapInPolymorphicAccess);
  } else {
    HInstruction* instr =
        BuildNamedGeneric(access_type, expr, slot, object, name, value);
    AddInstruction(instr);
    if (!ast_context()->IsEffect()) Push(access_type == LOAD ? instr : value);

    if (join != NULL) {
      Goto(join);
    } else {
      // The generic access was the only case; simulate and return directly.
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
      return;
    }
  }

  DCHECK(join != NULL);
  if (join->HasPredecessor()) {
    join->SetJoinId(ast_id);
    set_current_block(join);
    if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
  } else {
    // No case ever reached the join block; the graph is dead from here.
    set_current_block(NULL);
  }
}
   6333 
// Determines the possible receiver maps for the property access |expr| on
// |receiver| and stores the list in |*t|.  Returns true if the access is
// effectively monomorphic on a map whose properties can be accessed with
// inlined code.
static bool ComputeReceiverTypes(Expression* expr, HValue* receiver,
                                 SmallMapList** t,
                                 HOptimizedGraphBuilder* builder) {
  Zone* zone = builder->zone();
  SmallMapList* maps = expr->GetReceiverTypes();
  *t = maps;
  bool monomorphic = expr->IsMonomorphic();
  if (maps != nullptr && receiver->HasMonomorphicJSObjectType()) {
    if (maps->length() > 0) {
      // Drop feedback maps that the statically-known receiver map cannot
      // transition to.
      Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
      maps->FilterForPossibleTransitions(root_map);
      monomorphic = maps->length() == 1;
    } else {
      // No type feedback, see if we can infer the type. This is safely
      // possible if the receiver had a known map at some point, and no
      // map-changing stores have happened to it since.
      Handle<Map> candidate_map = receiver->GetMonomorphicJSObjectMap();
      // Walk backwards over the current block's instructions.
      for (HInstruction* current = builder->current_block()->last();
           current != nullptr; current = current->previous()) {
        if (current->IsBlockEntry()) break;
        if (current->CheckChangesFlag(kMaps)) {
          // Only allow map changes that store the candidate map. We don't
          // need to care which object the map is being written into.
          if (!current->IsStoreNamedField()) break;
          HStoreNamedField* map_change = HStoreNamedField::cast(current);
          if (!map_change->value()->IsConstant()) break;
          HConstant* map_constant = HConstant::cast(map_change->value());
          if (!map_constant->representation().IsTagged()) break;
          Handle<Object> map = map_constant->handle(builder->isolate());
          if (!map.is_identical_to(candidate_map)) break;
        }
        if (current == receiver) {
          // We made it all the way back to the receiver without encountering
          // a map change! So we can assume that the receiver still has the
          // candidate_map we know about.
          maps->Add(candidate_map, zone);
          monomorphic = true;
          break;
        }
      }
    }
  }
  return monomorphic && CanInlinePropertyAccess(maps->first());
}
   6378 
   6379 
   6380 static bool AreStringTypes(SmallMapList* maps) {
   6381   for (int i = 0; i < maps->length(); i++) {
   6382     if (maps->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
   6383   }
   6384   return true;
   6385 }
   6386 
// Emits the store described by property expression |prop| (keyed or named).
// Expects the environment stack to hold [object, (key,) value]; the stored
// value becomes the expression result.
void HOptimizedGraphBuilder::BuildStore(Expression* expr, Property* prop,
                                        FeedbackVectorSlot slot,
                                        BailoutId ast_id, BailoutId return_id,
                                        bool is_uninitialized) {
  if (!prop->key()->IsPropertyName()) {
    // Keyed store.
    HValue* value = Pop();
    HValue* key = Pop();
    HValue* object = Pop();
    bool has_side_effects = false;
    HValue* result =
        HandleKeyedElementAccess(object, key, value, expr, slot, ast_id,
                                 return_id, STORE, &has_side_effects);
    if (has_side_effects) {
      // Keep |value| visible across the simulate so deopt sees the result.
      if (!ast_context()->IsEffect()) Push(value);
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) Drop(1);
    }
    if (result == NULL) return;
    return ast_context()->ReturnValue(value);
  }

  // Named store.
  HValue* value = Pop();
  HValue* object = Pop();

  Literal* key = prop->key()->AsLiteral();
  Handle<String> name = Handle<String>::cast(key->value());
  DCHECK(!name.is_null());

  HValue* access = BuildNamedAccess(STORE, ast_id, return_id, expr, slot,
                                    object, name, value, is_uninitialized);
  if (access == NULL) return;

  // Keep |value| visible across the (possible) simulate, then return it as
  // the expression's result.
  if (!ast_context()->IsEffect()) Push(value);
  if (access->IsInstruction()) AddInstruction(HInstruction::cast(access));
  if (access->HasObservableSideEffects()) {
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
  if (!ast_context()->IsEffect()) Drop(1);
  return ast_context()->ReturnValue(value);
}
   6429 
   6430 
// Compiles an assignment whose target is a property: evaluates the object,
// the key (for keyed targets) and the value, then delegates to BuildStore.
void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  CHECK_ALIVE(VisitForValue(prop->obj()));
  if (!prop->key()->IsPropertyName()) {
    CHECK_ALIVE(VisitForValue(prop->key()));
  }
  CHECK_ALIVE(VisitForValue(expr->value()));
  BuildStore(expr, prop, expr->AssignmentSlot(), expr->id(),
             expr->AssignmentId(), expr->IsUninitialized());
}
   6442 
// Builds an inlined store to the global property |it| refers to, writing
// directly into its PropertyCell and registering a code dependency on the
// cell.  Depending on the cell type, emits deopt checks that keep the
// inlined fast path valid.  Returns the (unlinked) store instruction, or
// nullptr after a bailout.
HInstruction* HOptimizedGraphBuilder::InlineGlobalPropertyStore(
    LookupIterator* it, HValue* value, BailoutId ast_id) {
  Handle<PropertyCell> cell = it->GetPropertyCell();
  top_info()->dependencies()->AssumePropertyCell(cell);
  auto cell_type = it->property_details().cell_type();
  if (cell_type == PropertyCellType::kConstant ||
      cell_type == PropertyCellType::kUndefined) {
    // The cell's value is treated as constant: storing a different value
    // must deoptimize.
    Handle<Object> constant(cell->value(), isolate());
    if (value->IsConstant()) {
      // Compile-time comparison suffices for constant stored values.
      HConstant* c_value = HConstant::cast(value);
      if (!constant.is_identical_to(c_value->handle(isolate()))) {
        Add<HDeoptimize>(DeoptimizeReason::kConstantGlobalVariableAssignment,
                         Deoptimizer::EAGER);
      }
    } else {
      // Emit a runtime equality check against the constant.
      HValue* c_constant = Add<HConstant>(constant);
      IfBuilder builder(this);
      if (constant->IsNumber()) {
        builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
      } else {
        builder.If<HCompareObjectEqAndBranch>(value, c_constant);
      }
      builder.Then();
      builder.Else();
      Add<HDeoptimize>(DeoptimizeReason::kConstantGlobalVariableAssignment,
                       Deoptimizer::EAGER);
      builder.End();
    }
  }
  HConstant* cell_constant = Add<HConstant>(cell);
  auto access = HObjectAccess::ForPropertyCellValue();
  if (cell_type == PropertyCellType::kConstantType) {
    switch (cell->GetConstantType()) {
      case PropertyCellConstantType::kSmi:
        access = access.WithRepresentation(Representation::Smi());
        break;
      case PropertyCellConstantType::kStableMap: {
        // First check that the previous value of the {cell} still has the
        // map that we are about to check the new {value} for. If not, then
        // the stable map assumption was invalidated and we cannot continue
        // with the optimized code.
        Handle<HeapObject> cell_value(HeapObject::cast(cell->value()));
        Handle<Map> cell_value_map(cell_value->map());
        if (!cell_value_map->is_stable()) {
          Bailout(kUnstableConstantTypeHeapObject);
          return nullptr;
        }
        top_info()->dependencies()->AssumeMapStable(cell_value_map);
        // Now check that the new {value} is a HeapObject with the same map
        Add<HCheckHeapObject>(value);
        value = Add<HCheckMaps>(value, cell_value_map);
        access = access.WithRepresentation(Representation::HeapObject());
        break;
      }
    }
  }
  HInstruction* instr = New<HStoreNamedField>(cell_constant, access, value);
  // Global cell stores do not alias in-object fields; track them under a
  // dedicated GVN flag instead.
  instr->ClearChangesFlag(kInobjectFields);
  instr->SetChangesFlag(kGlobalVars);
  return instr;
}
   6504 
// Because not every expression has a position and there is not common
// superclass of Assignment and CountOperation, we cannot just pass the
// owning expression instead of position and ast_id separately.
//
// Emits the store for an assignment to the unallocated (global) variable
// |var|: a direct context-slot store for script-context variables, an
// inlined property-cell store when possible, and a StoreIC call otherwise.
void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
    Variable* var, HValue* value, FeedbackVectorSlot slot, BailoutId ast_id) {
  Handle<JSGlobalObject> global(current_info()->global_object());

  // Lookup in script contexts.
  {
    Handle<ScriptContextTable> script_contexts(
        global->native_context()->script_context_table());
    ScriptContextTable::LookupResult lookup;
    if (ScriptContextTable::Lookup(script_contexts, var->name(), &lookup)) {
      if (lookup.mode == CONST) {
        return Bailout(kNonInitializerAssignmentToConst);
      }
      Handle<Context> script_context =
          ScriptContextTable::GetContext(script_contexts, lookup.context_index);

      Handle<Object> current_value =
          FixedArray::get(*script_context, lookup.slot_index, isolate());

      // If the value is not the hole, it will stay initialized,
      // so no need to generate a check.
      if (current_value->IsTheHole(isolate())) {
        return Bailout(kReferenceToUninitializedVariable);
      }

      HStoreNamedField* instr = Add<HStoreNamedField>(
          Add<HConstant>(script_context),
          HObjectAccess::ForContextSlot(lookup.slot_index), value);
      USE(instr);
      DCHECK(instr->HasObservableSideEffects());
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      return;
    }
  }

  LookupIterator it(global, var->name(), LookupIterator::OWN);
  if (CanInlineGlobalPropertyAccess(var, &it, STORE)) {
    // Fast path: store directly into the global's property cell.
    HInstruction* instr = InlineGlobalPropertyStore(&it, value, ast_id);
    if (!instr) return;
    AddInstruction(instr);
    if (instr->HasObservableSideEffects()) {
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    }
  } else {
    // Slow path: call the StoreIC with the feedback vector slot.
    HValue* global_object = Add<HLoadNamedField>(
        BuildGetNativeContext(), nullptr,
        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX));
    Handle<TypeFeedbackVector> vector =
        handle(current_feedback_vector(), isolate());
    HValue* name = Add<HConstant>(var->name());
    HValue* vector_value = Add<HConstant>(vector);
    HValue* slot_value = Add<HConstant>(vector->GetIndex(slot));
    Callable callable = CodeFactory::StoreICInOptimizedCode(
        isolate(), function_language_mode());
    HValue* stub = Add<HConstant>(callable.code());
    HValue* values[] = {global_object, name, value, slot_value, vector_value};
    HCallWithDescriptor* instr = Add<HCallWithDescriptor>(
        Code::STORE_IC, stub, 0, callable.descriptor(), ArrayVector(values));
    USE(instr);
    DCHECK(instr->HasObservableSideEffects());
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}
   6571 
   6572 
   6573 void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
   6574   Expression* target = expr->target();
   6575   VariableProxy* proxy = target->AsVariableProxy();
   6576   Property* prop = target->AsProperty();
   6577   DCHECK(proxy == NULL || prop == NULL);
   6578 
   6579   // We have a second position recorded in the FullCodeGenerator to have
   6580   // type feedback for the binary operation.
   6581   BinaryOperation* operation = expr->binary_operation();
   6582 
   6583   if (proxy != NULL) {
   6584     Variable* var = proxy->var();
   6585     if (var->mode() == LET)  {
   6586       return Bailout(kUnsupportedLetCompoundAssignment);
   6587     }
   6588 
   6589     CHECK_ALIVE(VisitForValue(operation));
   6590 
   6591     switch (var->location()) {
   6592       case VariableLocation::UNALLOCATED:
   6593         HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
   6594                                        expr->AssignmentId());
   6595         break;
   6596 
   6597       case VariableLocation::PARAMETER:
   6598       case VariableLocation::LOCAL:
   6599         if (var->mode() == CONST) {
   6600           return Bailout(kNonInitializerAssignmentToConst);
   6601         }
   6602         BindIfLive(var, Top());
   6603         break;
   6604 
   6605       case VariableLocation::CONTEXT: {
   6606         // Bail out if we try to mutate a parameter value in a function
   6607         // using the arguments object.  We do not (yet) correctly handle the
   6608         // arguments property of the function.
   6609         if (current_info()->scope()->arguments() != NULL) {
   6610           // Parameters will be allocated to context slots.  We have no
   6611           // direct way to detect that the variable is a parameter so we do
   6612           // a linear search of the parameter variables.
   6613           int count = current_info()->scope()->num_parameters();
   6614           for (int i = 0; i < count; ++i) {
   6615             if (var == current_info()->scope()->parameter(i)) {
   6616               Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
   6617             }
   6618           }
   6619         }
   6620 
   6621         HStoreContextSlot::Mode mode;
   6622 
   6623         switch (var->mode()) {
   6624           case LET:
   6625             mode = HStoreContextSlot::kCheckDeoptimize;
   6626             break;
   6627           case CONST:
   6628             if (var->throw_on_const_assignment(function_language_mode())) {
   6629               return Bailout(kNonInitializerAssignmentToConst);
   6630             } else {
   6631               return ast_context()->ReturnValue(Pop());
   6632             }
   6633           default:
   6634             mode = HStoreContextSlot::kNoCheck;
   6635         }
   6636 
   6637         HValue* context = BuildContextChainWalk(var);
   6638         HStoreContextSlot* instr = Add<HStoreContextSlot>(
   6639             context, var->index(), mode, Top());
   6640         if (instr->HasObservableSideEffects()) {
   6641           Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
   6642         }
   6643         break;
   6644       }
   6645 
   6646       case VariableLocation::LOOKUP:
   6647         return Bailout(kCompoundAssignmentToLookupSlot);
   6648 
   6649       case VariableLocation::MODULE:
   6650         UNREACHABLE();
   6651     }
   6652     return ast_context()->ReturnValue(Pop());
   6653 
   6654   } else if (prop != NULL) {
   6655     CHECK_ALIVE(VisitForValue(prop->obj()));
   6656     HValue* object = Top();
   6657     HValue* key = NULL;
   6658     if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
   6659       CHECK_ALIVE(VisitForValue(prop->key()));
   6660       key = Top();
   6661     }
   6662 
   6663     CHECK_ALIVE(PushLoad(prop, object, key));
   6664 
   6665     CHECK_ALIVE(VisitForValue(expr->value()));
   6666     HValue* right = Pop();
   6667     HValue* left = Pop();
   6668 
   6669     Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));
   6670 
   6671     BuildStore(expr, prop, expr->AssignmentSlot(), expr->id(),
   6672                expr->AssignmentId(), expr->IsUninitialized());
   6673   } else {
   6674     return Bailout(kInvalidLhsInCompoundAssignment);
   6675   }
   6676 }
   6677 
   6678 
// Translates an Assignment AST node into hydrogen instructions.  Compound
// assignments are delegated to HandleCompoundAssignment and property stores
// to HandlePropertyAssignment; the remaining cases store into a variable
// according to its VariableLocation.
void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  DCHECK(proxy == NULL || prop == NULL);

  if (expr->is_compound()) {
    HandleCompoundAssignment(expr);
    return;
  }

  if (prop != NULL) {
    HandlePropertyAssignment(expr);
  } else if (proxy != NULL) {
    Variable* var = proxy->var();

    // Non-initializing assignments to const either bail out (the throwing
    // path is not optimized) or, in sloppy mode, are silently ignored --
    // though the RHS is still evaluated for its side effects.
    if (var->mode() == CONST) {
      if (expr->op() != Token::INIT) {
        if (var->throw_on_const_assignment(function_language_mode())) {
          return Bailout(kNonInitializerAssignmentToConst);
        } else {
          CHECK_ALIVE(VisitForValue(expr->value()));
          return ast_context()->ReturnValue(Pop());
        }
      }
    }

    // Handle the assignment.
    switch (var->location()) {
      case VariableLocation::UNALLOCATED:
        CHECK_ALIVE(VisitForValue(expr->value()));
        HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
                                       expr->AssignmentId());
        return ast_context()->ReturnValue(Pop());

      case VariableLocation::PARAMETER:
      case VariableLocation::LOCAL: {
        // Perform an initialization check for let declared variables
        // or parameters.
        if (var->mode() == LET && expr->op() == Token::ASSIGN) {
          HValue* env_value = environment()->Lookup(var);
          // The hole marks a let variable that has not been initialized yet.
          if (env_value == graph()->GetConstantHole()) {
            return Bailout(kAssignmentToLetVariableBeforeInitialization);
          }
        }
        // We do not allow the arguments object to occur in a context where it
        // may escape, but assignments to stack-allocated locals are
        // permitted.
        CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
        HValue* value = Pop();
        BindIfLive(var, value);
        return ast_context()->ReturnValue(value);
      }

      case VariableLocation::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function using
        // the arguments object.  We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots.  We have no direct way
          // to detect that the variable is a parameter.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        CHECK_ALIVE(VisitForValue(expr->value()));
        // Choose the store mode: let bindings need a deopt check against the
        // hole (temporal dead zone); everything else stores unchecked.
        HStoreContextSlot::Mode mode;
        if (expr->op() == Token::ASSIGN) {
          switch (var->mode()) {
            case LET:
              mode = HStoreContextSlot::kCheckDeoptimize;
              break;
            case CONST:
              // If we reached this point, the only possibility
              // is a sloppy assignment to a function name.
              DCHECK(function_language_mode() == SLOPPY &&
                     !var->throw_on_const_assignment(SLOPPY));
              return ast_context()->ReturnValue(Pop());
            default:
              mode = HStoreContextSlot::kNoCheck;
          }
        } else {
          DCHECK_EQ(Token::INIT, expr->op());
          mode = HStoreContextSlot::kNoCheck;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        return ast_context()->ReturnValue(Pop());
      }

      case VariableLocation::LOOKUP:
        return Bailout(kAssignmentToLOOKUPVariable);

      case VariableLocation::MODULE:
        UNREACHABLE();
    }
  } else {
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
}
   6791 
   6792 
void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
  // Generators are not optimized, so we should never get here; functions
  // containing yield are rejected before graph building starts.
  UNREACHABLE();
}
   6797 
   6798 
// Translates a throw expression: evaluates the exception value and emits a
// call to the Runtime::kThrow runtime function with it.
void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!ast_context()->IsEffect()) {
    // The parser turns invalid left-hand sides in assignments into throw
    // statements, which may not be in effect contexts. We might still try
    // to optimize such functions; bail out now if we do.
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
  CHECK_ALIVE(VisitForValue(expr->exception()));

  HValue* value = environment()->Pop();
  if (!is_tracking_positions()) SetSourcePosition(expr->position());
  // Runtime::kThrow takes the exception as its single pushed argument.
  Add<HPushArguments>(value);
  Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kThrow), 1);
  Add<HSimulate>(expr->id());

  // If the throw definitely exits the function, we can finish with a dummy
  // control flow at this point.  This is not the case if the throw is inside
  // an inlined function which may be replaced.
  if (call_context() == NULL) {
    FinishExitCurrentBlock(New<HAbnormalExit>());
  }
}
   6824 
   6825 
   6826 HInstruction* HGraphBuilder::AddLoadStringInstanceType(HValue* string) {
   6827   if (string->IsConstant()) {
   6828     HConstant* c_string = HConstant::cast(string);
   6829     if (c_string->HasStringValue()) {
   6830       return Add<HConstant>(c_string->StringValue()->map()->instance_type());
   6831     }
   6832   }
   6833   return Add<HLoadNamedField>(
   6834       Add<HLoadNamedField>(string, nullptr, HObjectAccess::ForMap()), nullptr,
   6835       HObjectAccess::ForMapInstanceType());
   6836 }
   6837 
   6838 
   6839 HInstruction* HGraphBuilder::AddLoadStringLength(HValue* string) {
   6840   return AddInstruction(BuildLoadStringLength(string));
   6841 }
   6842 
   6843 
   6844 HInstruction* HGraphBuilder::BuildLoadStringLength(HValue* string) {
   6845   if (string->IsConstant()) {
   6846     HConstant* c_string = HConstant::cast(string);
   6847     if (c_string->HasStringValue()) {
   6848       return New<HConstant>(c_string->StringValue()->length());
   6849     }
   6850   }
   6851   return New<HLoadNamedField>(string, nullptr,
   6852                               HObjectAccess::ForStringLength());
   6853 }
   6854 
   6855 
// Builds a generic (megamorphic) named property access as a call to the
// appropriate IC stub, threading the type feedback vector and slot index
// through so the IC shares profiling data with full-codegen.  The returned
// instruction is created with New<> and not yet inserted into a block.
HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
    PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
    HValue* object, Handle<Name> name, HValue* value, bool is_uninitialized) {
  if (is_uninitialized) {
    // No type feedback was collected for this access; deopt softly so the
    // function can be re-optimized once feedback exists.
    Add<HDeoptimize>(
        DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess,
        Deoptimizer::SOFT);
  }
  Handle<TypeFeedbackVector> vector(current_feedback_vector(), isolate());

  HValue* key = Add<HConstant>(name);
  HValue* vector_value = Add<HConstant>(vector);
  HValue* slot_value = Add<HConstant>(vector->GetIndex(slot));

  if (access_type == LOAD) {
    HValue* values[] = {object, key, slot_value, vector_value};
    if (!expr->AsProperty()->key()->IsPropertyName()) {
      // It's possible that a keyed load of a constant string was converted
      // to a named load. Here, at the last minute, we need to make sure to
      // use a generic Keyed Load if we are using the type vector, because
      // it has to share information with full code.
      Callable callable = CodeFactory::KeyedLoadICInOptimizedCode(isolate());
      HValue* stub = Add<HConstant>(callable.code());
      HCallWithDescriptor* result =
          New<HCallWithDescriptor>(Code::KEYED_LOAD_IC, stub, 0,
                                   callable.descriptor(), ArrayVector(values));
      return result;
    }
    Callable callable = CodeFactory::LoadICInOptimizedCode(isolate());
    HValue* stub = Add<HConstant>(callable.code());
    HCallWithDescriptor* result = New<HCallWithDescriptor>(
        Code::LOAD_IC, stub, 0, callable.descriptor(), ArrayVector(values));
    return result;

  } else {
    HValue* values[] = {object, key, value, slot_value, vector_value};
    if (vector->GetKind(slot) == FeedbackVectorSlotKind::KEYED_STORE_IC) {
      // It's possible that a keyed store of a constant string was converted
      // to a named store. Here, at the last minute, we need to make sure to
      // use a generic Keyed Store if we are using the type vector, because
      // it has to share information with full code.
      Callable callable = CodeFactory::KeyedStoreICInOptimizedCode(
          isolate(), function_language_mode());
      HValue* stub = Add<HConstant>(callable.code());
      HCallWithDescriptor* result =
          New<HCallWithDescriptor>(Code::KEYED_STORE_IC, stub, 0,
                                   callable.descriptor(), ArrayVector(values));
      return result;
    }
    Callable callable = CodeFactory::StoreICInOptimizedCode(
        isolate(), function_language_mode());
    HValue* stub = Add<HConstant>(callable.code());
    HCallWithDescriptor* result = New<HCallWithDescriptor>(
        Code::STORE_IC, stub, 0, callable.descriptor(), ArrayVector(values));
    return result;
  }
}
   6913 
   6914 
   6915 HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
   6916     PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
   6917     HValue* object, HValue* key, HValue* value) {
   6918   Handle<TypeFeedbackVector> vector(current_feedback_vector(), isolate());
   6919   HValue* vector_value = Add<HConstant>(vector);
   6920   HValue* slot_value = Add<HConstant>(vector->GetIndex(slot));
   6921 
   6922   if (access_type == LOAD) {
   6923     HValue* values[] = {object, key, slot_value, vector_value};
   6924 
   6925     Callable callable = CodeFactory::KeyedLoadICInOptimizedCode(isolate());
   6926     HValue* stub = Add<HConstant>(callable.code());
   6927     HCallWithDescriptor* result =
   6928         New<HCallWithDescriptor>(Code::KEYED_LOAD_IC, stub, 0,
   6929                                  callable.descriptor(), ArrayVector(values));
   6930     return result;
   6931   } else {
   6932     HValue* values[] = {object, key, value, slot_value, vector_value};
   6933 
   6934     Callable callable = CodeFactory::KeyedStoreICInOptimizedCode(
   6935         isolate(), function_language_mode());
   6936     HValue* stub = Add<HConstant>(callable.code());
   6937     HCallWithDescriptor* result =
   6938         New<HCallWithDescriptor>(Code::KEYED_STORE_IC, stub, 0,
   6939                                  callable.descriptor(), ArrayVector(values));
   6940     return result;
   6941   }
   6942 }
   6943 
   6944 
   6945 LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
   6946   // Loads from a "stock" fast holey double arrays can elide the hole check.
   6947   // Loads from a "stock" fast holey array can convert the hole to undefined
   6948   // with impunity.
   6949   LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
   6950   bool holey_double_elements =
   6951       *map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS);
   6952   bool holey_elements =
   6953       *map == isolate()->get_initial_js_array_map(FAST_HOLEY_ELEMENTS);
   6954   if ((holey_double_elements || holey_elements) &&
   6955       isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
   6956     load_mode =
   6957         holey_double_elements ? ALLOW_RETURN_HOLE : CONVERT_HOLE_TO_UNDEFINED;
   6958 
   6959     Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
   6960     Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
   6961     BuildCheckPrototypeMaps(prototype, object_prototype);
   6962     graph()->MarkDependsOnEmptyArrayProtoElements();
   6963   }
   6964   return load_mode;
   6965 }
   6966 
   6967 
// Builds an element access for a receiver known to have exactly one map.
// Adds a map check (guarded by |dependency| if given) and, for stores, a
// prototype chain check, then delegates to the unchecked monomorphic access
// builder.
HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
    HValue* object,
    HValue* key,
    HValue* val,
    HValue* dependency,
    Handle<Map> map,
    PropertyAccessType access_type,
    KeyedAccessStoreMode store_mode) {
  HCheckMaps* checked_object = Add<HCheckMaps>(object, map, dependency);

  if (access_type == STORE && map->prototype()->IsJSObject()) {
    // monomorphic stores need a prototype chain check because shape
    // changes could allow callbacks on elements in the chain that
    // aren't compatible with monomorphic keyed stores.
    PrototypeIterator iter(map);
    JSObject* holder = NULL;
    // Walk to the last JSObject on the prototype chain.
    while (!iter.IsAtEnd()) {
      // JSProxies can't occur here because we wouldn't have installed a
      // non-generic IC if there were any.
      holder = *PrototypeIterator::GetCurrent<JSObject>(iter);
      iter.Advance();
    }
    DCHECK(holder && holder->IsJSObject());

    BuildCheckPrototypeMaps(handle(JSObject::cast(map->prototype())),
                            Handle<JSObject>(holder));
  }

  LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
  return BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      map->instance_type() == JS_ARRAY_TYPE,
      map->elements_kind(), access_type,
      load_mode, store_mode);
}
   7003 
   7004 
   7005 static bool CanInlineElementAccess(Handle<Map> map) {
   7006   return map->IsJSObjectMap() &&
   7007          (map->has_fast_elements() || map->has_fixed_typed_array_elements()) &&
   7008          !map->has_indexed_interceptor() && !map->is_access_check_needed();
   7009 }
   7010 
   7011 
// Tries to build a single ("consolidated") element load that covers all of
// the receiver maps at once, using the most general elements kind seen.
// Returns NULL when the maps are not similar enough (mixed JSArray/JSObject,
// incompatible elements kinds, or non-inlinable access).
HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
    HValue* object,
    HValue* key,
    HValue* val,
    SmallMapList* maps) {
  // For polymorphic loads of similar elements kinds (i.e. all tagged or all
  // double), always use the "worst case" code without a transition.  This is
  // much faster than transitioning the elements to the worst case, trading a
  // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
  bool has_double_maps = false;
  bool has_smi_or_object_maps = false;
  bool has_js_array_access = false;
  bool has_non_js_array_access = false;
  bool has_seen_holey_elements = false;
  Handle<Map> most_general_consolidated_map;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    if (!CanInlineElementAccess(map)) return NULL;
    // Don't allow mixing of JSArrays with JSObjects.
    if (map->instance_type() == JS_ARRAY_TYPE) {
      if (has_non_js_array_access) return NULL;
      has_js_array_access = true;
    } else if (has_js_array_access) {
      return NULL;
    } else {
      has_non_js_array_access = true;
    }
    // Don't allow mixed, incompatible elements kinds.
    if (map->has_fast_double_elements()) {
      if (has_smi_or_object_maps) return NULL;
      has_double_maps = true;
    } else if (map->has_fast_smi_or_object_elements()) {
      if (has_double_maps) return NULL;
      has_smi_or_object_maps = true;
    } else {
      return NULL;
    }
    // Remember if we've ever seen holey elements.
    if (IsHoleyElementsKind(map->elements_kind())) {
      has_seen_holey_elements = true;
    }
    // Remember the most general elements kind, the code for its load will
    // properly handle all of the more specific cases.
    if ((i == 0) || IsMoreGeneralElementsKindTransition(
            most_general_consolidated_map->elements_kind(),
            map->elements_kind())) {
      most_general_consolidated_map = map;
    }
  }
  if (!has_double_maps && !has_smi_or_object_maps) return NULL;

  HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
  // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
  // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
  ElementsKind consolidated_elements_kind = has_seen_holey_elements
      ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
      : most_general_consolidated_map->elements_kind();
  LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
  if (has_seen_holey_elements) {
    // Make sure that all of the maps we are handling have the initial array
    // prototype.
    bool saw_non_array_prototype = false;
    for (int i = 0; i < maps->length(); ++i) {
      Handle<Map> map = maps->at(i);
      if (map->prototype() != *isolate()->initial_array_prototype()) {
        // We can't guarantee that loading the hole is safe. The prototype may
        // have an element at this position.
        saw_non_array_prototype = true;
        break;
      }
    }

    if (!saw_non_array_prototype) {
      Handle<Map> holey_map = handle(
          isolate()->get_initial_js_array_map(consolidated_elements_kind));
      load_mode = BuildKeyedHoleMode(holey_map);
      if (load_mode != NEVER_RETURN_HOLE) {
        for (int i = 0; i < maps->length(); ++i) {
          Handle<Map> map = maps->at(i);
          // The prototype check was already done for the holey map in
          // BuildKeyedHoleMode.
          if (!map.is_identical_to(holey_map)) {
            Handle<JSObject> prototype(JSObject::cast(map->prototype()),
                                       isolate());
            Handle<JSObject> object_prototype =
                isolate()->initial_object_prototype();
            BuildCheckPrototypeMaps(prototype, object_prototype);
          }
        }
      }
    }
  }
  HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
      consolidated_elements_kind, LOAD, load_mode, STANDARD_STORE);
  return instr;
}
   7110 
   7111 
// Builds an element access for a receiver with multiple possible maps.  It
// first tries a consolidated load; otherwise it emits elements-kind
// transitions where possible and then either a monomorphic access (one map
// left) or a chain of map-compare branches joining into a single block.
// |*has_side_effects| tells the caller whether it must add a Simulate.
HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
    Expression* expr, FeedbackVectorSlot slot, HValue* object, HValue* key,
    HValue* val, SmallMapList* maps, PropertyAccessType access_type,
    KeyedAccessStoreMode store_mode, bool* has_side_effects) {
  *has_side_effects = false;
  BuildCheckHeapObject(object);

  if (access_type == LOAD) {
    HInstruction* consolidated_load =
        TryBuildConsolidatedElementLoad(object, key, val, maps);
    if (consolidated_load != NULL) {
      *has_side_effects |= consolidated_load->HasObservableSideEffects();
      return consolidated_load;
    }
  }

  // Elements_kind transition support.
  MapHandleList transition_target(maps->length());
  // Collect possible transition targets.
  MapHandleList possible_transitioned_maps(maps->length());
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    // Loads from strings or loads with a mix of string and non-string maps
    // shouldn't be handled polymorphically.
    DCHECK(access_type != LOAD || !map->IsStringMap());
    ElementsKind elements_kind = map->elements_kind();
    if (CanInlineElementAccess(map) && IsFastElementsKind(elements_kind) &&
        elements_kind != GetInitialFastElementsKind()) {
      possible_transitioned_maps.Add(map);
    }
    // Sloppy arguments elements always force the generic access path.
    if (IsSloppyArgumentsElements(elements_kind)) {
      HInstruction* result =
          BuildKeyedGeneric(access_type, expr, slot, object, key, val);
      *has_side_effects = result->HasObservableSideEffects();
      return AddInstruction(result);
    }
  }
  // Get transition target for each map (NULL == no transition).
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    Map* transitioned_map =
        map->FindElementsKindTransitionedMap(&possible_transitioned_maps);
    if (transitioned_map != nullptr) {
      transition_target.Add(handle(transitioned_map));
    } else {
      transition_target.Add(Handle<Map>());
    }
  }

  // Emit a transition for every map that has one; the rest must be handled
  // by the access below.
  MapHandleList untransitionable_maps(maps->length());
  HTransitionElementsKind* transition = NULL;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    DCHECK(map->IsMap());
    if (!transition_target.at(i).is_null()) {
      DCHECK(Map::IsValidElementsTransition(
          map->elements_kind(),
          transition_target.at(i)->elements_kind()));
      transition = Add<HTransitionElementsKind>(object, map,
                                                transition_target.at(i));
    } else {
      untransitionable_maps.Add(map);
    }
  }

  // If only one map is left after transitioning, handle this case
  // monomorphically.
  DCHECK(untransitionable_maps.length() >= 1);
  if (untransitionable_maps.length() == 1) {
    Handle<Map> untransitionable_map = untransitionable_maps[0];
    HInstruction* instr = NULL;
    if (!CanInlineElementAccess(untransitionable_map)) {
      instr = AddInstruction(
          BuildKeyedGeneric(access_type, expr, slot, object, key, val));
    } else {
      instr = BuildMonomorphicElementAccess(
          object, key, val, transition, untransitionable_map, access_type,
          store_mode);
    }
    *has_side_effects |= instr->HasObservableSideEffects();
    return access_type == STORE ? val : instr;
  }

  // Polymorphic case: branch on the receiver's map, one basic block per map,
  // all joining into |join|.
  HBasicBlock* join = graph()->CreateBasicBlock();

  for (int i = 0; i < untransitionable_maps.length(); ++i) {
    Handle<Map> map = untransitionable_maps[i];
    ElementsKind elements_kind = map->elements_kind();
    HBasicBlock* this_map = graph()->CreateBasicBlock();
    HBasicBlock* other_map = graph()->CreateBasicBlock();
    HCompareMap* mapcompare =
        New<HCompareMap>(object, map, this_map, other_map);
    FinishCurrentBlock(mapcompare);

    set_current_block(this_map);
    HInstruction* access = NULL;
    if (!CanInlineElementAccess(map)) {
      access = AddInstruction(
          BuildKeyedGeneric(access_type, expr, slot, object, key, val));
    } else {
      DCHECK(IsFastElementsKind(elements_kind) ||
             IsFixedTypedArrayElementsKind(elements_kind));
      LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
      // Happily, mapcompare is a checked object.
      access = BuildUncheckedMonomorphicElementAccess(
          mapcompare, key, val,
          map->instance_type() == JS_ARRAY_TYPE,
          elements_kind, access_type,
          load_mode,
          store_mode);
    }
    *has_side_effects |= access->HasObservableSideEffects();
    // The caller will use has_side_effects and add a correct Simulate.
    access->SetFlag(HValue::kHasNoObservableSideEffects);
    if (access_type == LOAD) {
      Push(access);
    }
    NoObservableSideEffectsScope scope(this);
    GotoNoSimulate(join);
    set_current_block(other_map);
  }

  // Ensure that we visited at least one map above that goes to join. This is
  // necessary because FinishExitWithHardDeoptimization does an AbnormalExit
  // rather than joining the join block. If this becomes an issue, insert a
  // generic access in the case length() == 0.
  DCHECK(join->predecessors()->length() > 0);
  // Deopt if none of the cases matched.
  NoObservableSideEffectsScope scope(this);
  FinishExitWithHardDeoptimization(
      DeoptimizeReason::kUnknownMapInPolymorphicElementAccess);
  set_current_block(join);
  return access_type == STORE ? val : Pop();
}
   7246 
   7247 HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
   7248     HValue* obj, HValue* key, HValue* val, Expression* expr,
   7249     FeedbackVectorSlot slot, BailoutId ast_id, BailoutId return_id,
   7250     PropertyAccessType access_type, bool* has_side_effects) {
   7251   // A keyed name access with type feedback may contain the name.
   7252   Handle<TypeFeedbackVector> vector =
   7253       handle(current_feedback_vector(), isolate());
   7254   HValue* expected_key = key;
   7255   if (!key->ActualValue()->IsConstant()) {
   7256     Name* name = nullptr;
   7257     if (access_type == LOAD) {
   7258       KeyedLoadICNexus nexus(vector, slot);
   7259       name = nexus.FindFirstName();
   7260     } else {
   7261       KeyedStoreICNexus nexus(vector, slot);
   7262       name = nexus.FindFirstName();
   7263     }
   7264     if (name != nullptr) {
   7265       Handle<Name> handle_name(name);
   7266       expected_key = Add<HConstant>(handle_name);
   7267       // We need a check against the key.
   7268       bool in_new_space = isolate()->heap()->InNewSpace(*handle_name);
   7269       Unique<Name> unique_name = Unique<Name>::CreateUninitialized(handle_name);
   7270       Add<HCheckValue>(key, unique_name, in_new_space);
   7271     }
   7272   }
   7273   if (expected_key->ActualValue()->IsConstant()) {
   7274     Handle<Object> constant =
   7275         HConstant::cast(expected_key->ActualValue())->handle(isolate());
   7276     uint32_t array_index;
   7277     if ((constant->IsString() &&
   7278          !Handle<String>::cast(constant)->AsArrayIndex(&array_index)) ||
   7279         constant->IsSymbol()) {
   7280       if (!constant->IsUniqueName()) {
   7281         constant = isolate()->factory()->InternalizeString(
   7282             Handle<String>::cast(constant));
   7283       }
   7284       HValue* access =
   7285           BuildNamedAccess(access_type, ast_id, return_id, expr, slot, obj,
   7286                            Handle<Name>::cast(constant), val, false);
   7287       if (access == NULL || access->IsPhi() ||
   7288           HInstruction::cast(access)->IsLinked()) {
   7289         *has_side_effects = false;
   7290       } else {
   7291         HInstruction* instr = HInstruction::cast(access);
   7292         AddInstruction(instr);
   7293         *has_side_effects = instr->HasObservableSideEffects();
   7294       }
   7295       return access;
   7296     }
   7297   }
   7298 
   7299   DCHECK(!expr->IsPropertyName());
   7300   HInstruction* instr = NULL;
   7301 
   7302   SmallMapList* maps;
   7303   bool monomorphic = ComputeReceiverTypes(expr, obj, &maps, this);
   7304 
   7305   bool force_generic = false;
   7306   if (expr->GetKeyType() == PROPERTY) {
   7307     // Non-Generic accesses assume that elements are being accessed, and will
   7308     // deopt for non-index keys, which the IC knows will occur.
   7309     // TODO(jkummerow): Consider adding proper support for property accesses.
   7310     force_generic = true;
   7311     monomorphic = false;
   7312   } else if (access_type == STORE &&
   7313              (monomorphic || (maps != NULL && !maps->is_empty()))) {
   7314     // Stores can't be mono/polymorphic if their prototype chain has dictionary
   7315     // elements. However a receiver map that has dictionary elements itself
   7316     // should be left to normal mono/poly behavior (the other maps may benefit
   7317     // from highly optimized stores).
   7318     for (int i = 0; i < maps->length(); i++) {
   7319       Handle<Map> current_map = maps->at(i);
   7320       if (current_map->DictionaryElementsInPrototypeChainOnly()) {
   7321         force_generic = true;
   7322         monomorphic = false;
   7323         break;
   7324       }
   7325     }
   7326   } else if (access_type == LOAD && !monomorphic &&
   7327              (maps != NULL && !maps->is_empty())) {
   7328     // Polymorphic loads have to go generic if any of the maps are strings.
   7329     // If some, but not all of the maps are strings, we should go generic
   7330     // because polymorphic access wants to key on ElementsKind and isn't
   7331     // compatible with strings.
   7332     for (int i = 0; i < maps->length(); i++) {
   7333       Handle<Map> current_map = maps->at(i);
   7334       if (current_map->IsStringMap()) {
   7335         force_generic = true;
   7336         break;
   7337       }
   7338     }
   7339   }
   7340 
   7341   if (monomorphic) {
   7342     Handle<Map> map = maps->first();
   7343     if (!CanInlineElementAccess(map)) {
   7344       instr = AddInstruction(
   7345           BuildKeyedGeneric(access_type, expr, slot, obj, key, val));
   7346     } else {
   7347       BuildCheckHeapObject(obj);
   7348       instr = BuildMonomorphicElementAccess(
   7349           obj, key, val, NULL, map, access_type, expr->GetStoreMode());
   7350     }
   7351   } else if (!for