      1 // Copyright 2013 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/v8.h"
      6 
      7 #if V8_TARGET_ARCH_ARM64
      8 
      9 #include "src/code-stubs.h"
     10 #include "src/codegen.h"
     11 #include "src/compiler.h"
     12 #include "src/debug.h"
     13 #include "src/full-codegen.h"
     14 #include "src/isolate-inl.h"
     15 #include "src/parser.h"
     16 #include "src/scopes.h"
     17 #include "src/stub-cache.h"
     18 
     19 #include "src/arm64/code-stubs-arm64.h"
     20 #include "src/arm64/macro-assembler-arm64.h"
     21 
     22 namespace v8 {
     23 namespace internal {
     24 
     25 #define __ ACCESS_MASM(masm_)
     26 
     27 class JumpPatchSite BASE_EMBEDDED {
     28  public:
     29   explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
     30 #ifdef DEBUG
     31     info_emitted_ = false;
     32 #endif
     33   }
     34 
     35   ~JumpPatchSite() {
     36     if (patch_site_.is_bound()) {
     37       ASSERT(info_emitted_);
     38     } else {
     39       ASSERT(reg_.IsNone());
     40     }
     41   }
     42 
     43   void EmitJumpIfNotSmi(Register reg, Label* target) {
     44     // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
     45     InstructionAccurateScope scope(masm_, 1);
     46     ASSERT(!info_emitted_);
     47     ASSERT(reg.Is64Bits());
     48     ASSERT(!reg.Is(csp));
     49     reg_ = reg;
     50     __ bind(&patch_site_);
     51     __ tbz(xzr, 0, target);   // Always taken before patched.
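             // (Bit 0 of xzr is always clear, so this branch is unconditional until
             // PatchInlinedSmiCode rewrites it to test the smi tag bit of reg_.)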
     52   }
     53 
     54   void EmitJumpIfSmi(Register reg, Label* target) {
     55     // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
     56     InstructionAccurateScope scope(masm_, 1);
     57     ASSERT(!info_emitted_);
     58     ASSERT(reg.Is64Bits());
     59     ASSERT(!reg.Is(csp));
     60     reg_ = reg;
     61     __ bind(&patch_site_);
     62     __ tbnz(xzr, 0, target);  // Never taken before patched.
     63   }
     64 
     65   void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
     66     UseScratchRegisterScope temps(masm_);
     67     Register temp = temps.AcquireX();
     68     __ Orr(temp, reg1, reg2);
     69     EmitJumpIfNotSmi(temp, target);
     70   }
     71 
     72   void EmitPatchInfo() {
     73     Assembler::BlockPoolsScope scope(masm_);
     74     InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
     75 #ifdef DEBUG
     76     info_emitted_ = true;
     77 #endif
     78   }
     79 
     80  private:
     81   MacroAssembler* masm_;
     82   Label patch_site_;
     83   Register reg_;
     84 #ifdef DEBUG
     85   bool info_emitted_;
     86 #endif
     87 };
     88 
     89 
     90 // Generate code for a JS function. On entry to the function the receiver
     91 // and arguments have been pushed on the stack left to right. The actual
     92 // argument count matches the formal parameter count expected by the
     93 // function.
     94 //
     95 // The live registers are:
     96 //   - x1: the JS function object being called (i.e. ourselves).
     97 //   - cp: our context.
     98 //   - fp: our caller's frame pointer.
     99 //   - jssp: stack pointer.
    100 //   - lr: return address.
    101 //
    102 // The function builds a JS frame. See JavaScriptFrameConstants in
     103 // frames-arm64.h for its layout.
    104 void FullCodeGenerator::Generate() {
    105   CompilationInfo* info = info_;
    106   handler_table_ =
    107       isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
    108 
    109   profiling_counter_ = isolate()->factory()->NewCell(
    110       Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
    111   SetFunctionPosition(function());
    112   Comment cmnt(masm_, "[ Function compiled by full code generator");
    113 
    114   ProfileEntryHookStub::MaybeCallEntryHook(masm_);
    115 
    116 #ifdef DEBUG
    117   if (strlen(FLAG_stop_at) > 0 &&
    118       info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    119     __ Debug("stop-at", __LINE__, BREAK);
    120   }
    121 #endif
    122 
    123   // Sloppy mode functions and builtins need to replace the receiver with the
    124   // global proxy when called as functions (without an explicit receiver
    125   // object).
    126   if (info->strict_mode() == SLOPPY && !info->is_native()) {
    127     Label ok;
    128     int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    129     __ Peek(x10, receiver_offset);
    130     __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);
    131 
    132     __ Ldr(x10, GlobalObjectMemOperand());
    133     __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalReceiverOffset));
    134     __ Poke(x10, receiver_offset);
    135 
    136     __ Bind(&ok);
    137   }
    138 
    139 
    140   // Open a frame scope to indicate that there is a frame on the stack.
    141   // The MANUAL indicates that the scope shouldn't actually generate code
    142   // to set up the frame because we do it manually below.
    143   FrameScope frame_scope(masm_, StackFrame::MANUAL);
    144 
    145   // This call emits the following sequence in a way that can be patched for
    146   // code ageing support:
    147   //  Push(lr, fp, cp, x1);
    148   //  Add(fp, jssp, 2 * kPointerSize);
    149   info->set_prologue_offset(masm_->pc_offset());
    150   __ Prologue(info->IsCodePreAgingActive());
    151   info->AddNoFrameRange(0, masm_->pc_offset());
    152 
    153   // Reserve space on the stack for locals.
    154   { Comment cmnt(masm_, "[ Allocate locals");
    155     int locals_count = info->scope()->num_stack_slots();
    156     // Generators allocate locals, if any, in context slots.
    157     ASSERT(!info->function()->is_generator() || locals_count == 0);
    158 
    159     if (locals_count > 0) {
    160       if (locals_count >= 128) {
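                 // For larger frames, check the real stack limit before reserving the
                 // space, and report a stack overflow if it would be exceeded.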
    161         Label ok;
    162         ASSERT(jssp.Is(__ StackPointer()));
    163         __ Sub(x10, jssp, locals_count * kPointerSize);
    164         __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
    165         __ B(hs, &ok);
    166         __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    167         __ Bind(&ok);
    168       }
    169       __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
    170       if (FLAG_optimize_for_size) {
     171         __ PushMultipleTimes(x10, locals_count);
    172       } else {
    173         const int kMaxPushes = 32;
    174         if (locals_count >= kMaxPushes) {
    175           int loop_iterations = locals_count / kMaxPushes;
    176           __ Mov(x3, loop_iterations);
    177           Label loop_header;
    178           __ Bind(&loop_header);
    179           // Do pushes.
     180           __ PushMultipleTimes(x10, kMaxPushes);
    181           __ Subs(x3, x3, 1);
    182           __ B(ne, &loop_header);
    183         }
    184         int remaining = locals_count % kMaxPushes;
    185         // Emit the remaining pushes.
     186         __ PushMultipleTimes(x10, remaining);
    187       }
    188     }
    189   }
    190 
    191   bool function_in_register_x1 = true;
    192 
    193   int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    194   if (heap_slots > 0) {
    195     // Argument to NewContext is the function, which is still in x1.
    196     Comment cmnt(masm_, "[ Allocate context");
    197     bool need_write_barrier = true;
    198     if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
    199       __ Mov(x10, Operand(info->scope()->GetScopeInfo()));
    200       __ Push(x1, x10);
    201       __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
    202     } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
    203       FastNewContextStub stub(isolate(), heap_slots);
    204       __ CallStub(&stub);
    205       // Result of FastNewContextStub is always in new space.
    206       need_write_barrier = false;
    207     } else {
    208       __ Push(x1);
    209       __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
    210     }
    211     function_in_register_x1 = false;
    212     // Context is returned in x0.  It replaces the context passed to us.
    213     // It's saved in the stack and kept live in cp.
    214     __ Mov(cp, x0);
    215     __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    216     // Copy any necessary parameters into the context.
    217     int num_parameters = info->scope()->num_parameters();
    218     for (int i = 0; i < num_parameters; i++) {
    219       Variable* var = scope()->parameter(i);
    220       if (var->IsContextSlot()) {
    221         int parameter_offset = StandardFrameConstants::kCallerSPOffset +
    222             (num_parameters - 1 - i) * kPointerSize;
    223         // Load parameter from stack.
    224         __ Ldr(x10, MemOperand(fp, parameter_offset));
    225         // Store it in the context.
    226         MemOperand target = ContextMemOperand(cp, var->index());
    227         __ Str(x10, target);
    228 
    229         // Update the write barrier.
    230         if (need_write_barrier) {
    231           __ RecordWriteContextSlot(
    232               cp, target.offset(), x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
    233         } else if (FLAG_debug_code) {
    234           Label done;
    235           __ JumpIfInNewSpace(cp, &done);
    236           __ Abort(kExpectedNewSpaceObject);
    237           __ bind(&done);
    238         }
    239       }
    240     }
    241   }
    242 
    243   Variable* arguments = scope()->arguments();
    244   if (arguments != NULL) {
    245     // Function uses arguments object.
    246     Comment cmnt(masm_, "[ Allocate arguments object");
    247     if (!function_in_register_x1) {
    248       // Load this again, if it's used by the local context below.
    249       __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    250     } else {
    251       __ Mov(x3, x1);
    252     }
    253     // Receiver is just before the parameters on the caller's stack.
    254     int num_parameters = info->scope()->num_parameters();
    255     int offset = num_parameters * kPointerSize;
    256     __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
    257     __ Mov(x1, Smi::FromInt(num_parameters));
    258     __ Push(x3, x2, x1);
    259 
    260     // Arguments to ArgumentsAccessStub:
    261     //   function, receiver address, parameter count.
    262     // The stub will rewrite receiver and parameter count if the previous
    263     // stack frame was an arguments adapter frame.
    264     ArgumentsAccessStub::Type type;
    265     if (strict_mode() == STRICT) {
    266       type = ArgumentsAccessStub::NEW_STRICT;
    267     } else if (function()->has_duplicate_parameters()) {
    268       type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    269     } else {
    270       type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    271     }
    272     ArgumentsAccessStub stub(isolate(), type);
    273     __ CallStub(&stub);
    274 
    275     SetVar(arguments, x0, x1, x2);
    276   }
    277 
    278   if (FLAG_trace) {
    279     __ CallRuntime(Runtime::kTraceEnter, 0);
    280   }
    281 
    282 
    283   // Visit the declarations and body unless there is an illegal
    284   // redeclaration.
    285   if (scope()->HasIllegalRedeclaration()) {
    286     Comment cmnt(masm_, "[ Declarations");
    287     scope()->VisitIllegalRedeclaration(this);
    288 
    289   } else {
    290     PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    291     { Comment cmnt(masm_, "[ Declarations");
    292       if (scope()->is_function_scope() && scope()->function() != NULL) {
    293         VariableDeclaration* function = scope()->function();
    294         ASSERT(function->proxy()->var()->mode() == CONST ||
    295                function->proxy()->var()->mode() == CONST_LEGACY);
    296         ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
    297         VisitVariableDeclaration(function);
    298       }
    299       VisitDeclarations(scope()->declarations());
    300     }
    301   }
    302 
    303   { Comment cmnt(masm_, "[ Stack check");
    304     PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
    305     Label ok;
    306     ASSERT(jssp.Is(__ StackPointer()));
    307     __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
    308     __ B(hs, &ok);
    309     PredictableCodeSizeScope predictable(masm_,
    310                                          Assembler::kCallSizeWithRelocation);
    311     __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    312     __ Bind(&ok);
    313   }
    314 
    315   { Comment cmnt(masm_, "[ Body");
    316     ASSERT(loop_depth() == 0);
    317     VisitStatements(function()->body());
    318     ASSERT(loop_depth() == 0);
    319   }
    320 
    321   // Always emit a 'return undefined' in case control fell off the end of
    322   // the body.
    323   { Comment cmnt(masm_, "[ return <undefined>;");
    324     __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
    325   }
    326   EmitReturnSequence();
    327 
    328   // Force emission of the pools, so they don't get emitted in the middle
    329   // of the back edge table.
    330   masm()->CheckVeneerPool(true, false);
    331   masm()->CheckConstPool(true, false);
    332 }
    333 
    334 
    335 void FullCodeGenerator::ClearAccumulator() {
    336   __ Mov(x0, Smi::FromInt(0));
    337 }
    338 
    339 
    340 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
    341   __ Mov(x2, Operand(profiling_counter_));
    342   __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
    343   __ Subs(x3, x3, Smi::FromInt(delta));
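           // Subs leaves the condition flags set; callers branch on 'pl' to skip the
           // interrupt check while the counter is still non-negative.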
    344   __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
    345 }
    346 
    347 
    348 void FullCodeGenerator::EmitProfilingCounterReset() {
    349   int reset_value = FLAG_interrupt_budget;
    350   if (info_->is_debug()) {
    351     // Detect debug break requests as soon as possible.
    352     reset_value = FLAG_interrupt_budget >> 4;
    353   }
    354   __ Mov(x2, Operand(profiling_counter_));
    355   __ Mov(x3, Smi::FromInt(reset_value));
    356   __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
    357 }
    358 
    359 
    360 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
    361                                                 Label* back_edge_target) {
    362   ASSERT(jssp.Is(__ StackPointer()));
    363   Comment cmnt(masm_, "[ Back edge bookkeeping");
    364   // Block literal pools whilst emitting back edge code.
    365   Assembler::BlockPoolsScope block_const_pool(masm_);
    366   Label ok;
    367 
    368   ASSERT(back_edge_target->is_bound());
    369   // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
    370   // to reduce the absolute error due to the integer division. To do that,
    371   // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
    372   // the result).
    373   int distance =
    374     masm_->SizeOfCodeGeneratedSince(back_edge_target) + kCodeSizeMultiplier / 2;
    375   int weight = Min(kMaxBackEdgeWeight,
    376                    Max(1, distance / kCodeSizeMultiplier));
    377   EmitProfilingCounterDecrement(weight);
    378   __ B(pl, &ok);
    379   __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
    380 
    381   // Record a mapping of this PC offset to the OSR id.  This is used to find
    382   // the AST id from the unoptimized code in order to use it as a key into
    383   // the deoptimization input data found in the optimized code.
    384   RecordBackEdge(stmt->OsrEntryId());
    385 
    386   EmitProfilingCounterReset();
    387 
    388   __ Bind(&ok);
    389   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
    390   // Record a mapping of the OSR id to this PC.  This is used if the OSR
    391   // entry becomes the target of a bailout.  We don't expect it to be, but
    392   // we want it to work if it is.
    393   PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
    394 }
    395 
    396 
    397 void FullCodeGenerator::EmitReturnSequence() {
    398   Comment cmnt(masm_, "[ Return sequence");
    399 
    400   if (return_label_.is_bound()) {
    401     __ B(&return_label_);
    402 
    403   } else {
    404     __ Bind(&return_label_);
    405     if (FLAG_trace) {
    406       // Push the return value on the stack as the parameter.
    407       // Runtime::TraceExit returns its parameter in x0.
    408       __ Push(result_register());
    409       __ CallRuntime(Runtime::kTraceExit, 1);
    410       ASSERT(x0.Is(result_register()));
    411     }
    412     // Pretend that the exit is a backwards jump to the entry.
    413     int weight = 1;
    414     if (info_->ShouldSelfOptimize()) {
    415       weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    416     } else {
    417       int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
    418       weight = Min(kMaxBackEdgeWeight,
    419                    Max(1, distance / kCodeSizeMultiplier));
    420     }
    421     EmitProfilingCounterDecrement(weight);
    422     Label ok;
    423     __ B(pl, &ok);
    424     __ Push(x0);
    425     __ Call(isolate()->builtins()->InterruptCheck(),
    426             RelocInfo::CODE_TARGET);
    427     __ Pop(x0);
    428     EmitProfilingCounterReset();
    429     __ Bind(&ok);
    430 
    431     // Make sure that the constant pool is not emitted inside of the return
    432     // sequence. This sequence can get patched when the debugger is used. See
    433     // debug-arm64.cc:BreakLocationIterator::SetDebugBreakAtReturn().
    434     {
    435       InstructionAccurateScope scope(masm_,
    436                                      Assembler::kJSRetSequenceInstructions);
    437       CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
    438       __ RecordJSReturn();
    439       // This code is generated using Assembler methods rather than Macro
    440       // Assembler methods because it will be patched later on, and so the size
    441       // of the generated code must be consistent.
    442       const Register& current_sp = __ StackPointer();
     443       // Nothing ensures 16-byte alignment here.
    444       ASSERT(!current_sp.Is(csp));
    445       __ mov(current_sp, fp);
    446       int no_frame_start = masm_->pc_offset();
    447       __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
    448       // Drop the arguments and receiver and return.
    449       // TODO(all): This implementation is overkill as it supports 2**31+1
    450       // arguments, consider how to improve it without creating a security
    451       // hole.
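               // The amount to drop is stored as a 64-bit literal (the dc64 below),
               // located three instructions after this load, just past the ret.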
    452       __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
    453       __ add(current_sp, current_sp, ip0);
    454       __ ret();
    455       __ dc64(kXRegSize * (info_->scope()->num_parameters() + 1));
    456       info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    457     }
    458   }
    459 }
    460 
    461 
    462 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
    463   ASSERT(var->IsStackAllocated() || var->IsContextSlot());
    464 }
    465 
    466 
    467 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
    468   ASSERT(var->IsStackAllocated() || var->IsContextSlot());
    469   codegen()->GetVar(result_register(), var);
    470 }
    471 
    472 
    473 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
    474   ASSERT(var->IsStackAllocated() || var->IsContextSlot());
    475   codegen()->GetVar(result_register(), var);
    476   __ Push(result_register());
    477 }
    478 
    479 
    480 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
    481   ASSERT(var->IsStackAllocated() || var->IsContextSlot());
    482   // For simplicity we always test the accumulator register.
    483   codegen()->GetVar(result_register(), var);
    484   codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
    485   codegen()->DoTest(this);
    486 }
    487 
    488 
    489 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
    490   // Root values have no side effects.
    491 }
    492 
    493 
    494 void FullCodeGenerator::AccumulatorValueContext::Plug(
    495     Heap::RootListIndex index) const {
    496   __ LoadRoot(result_register(), index);
    497 }
    498 
    499 
    500 void FullCodeGenerator::StackValueContext::Plug(
    501     Heap::RootListIndex index) const {
    502   __ LoadRoot(result_register(), index);
    503   __ Push(result_register());
    504 }
    505 
    506 
    507 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
    508   codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
    509                                           false_label_);
    510   if (index == Heap::kUndefinedValueRootIndex ||
    511       index == Heap::kNullValueRootIndex ||
    512       index == Heap::kFalseValueRootIndex) {
    513     if (false_label_ != fall_through_) __ B(false_label_);
    514   } else if (index == Heap::kTrueValueRootIndex) {
    515     if (true_label_ != fall_through_) __ B(true_label_);
    516   } else {
    517     __ LoadRoot(result_register(), index);
    518     codegen()->DoTest(this);
    519   }
    520 }
    521 
    522 
    523 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
    524 }
    525 
    526 
    527 void FullCodeGenerator::AccumulatorValueContext::Plug(
    528     Handle<Object> lit) const {
    529   __ Mov(result_register(), Operand(lit));
    530 }
    531 
    532 
    533 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
    534   // Immediates cannot be pushed directly.
    535   __ Mov(result_register(), Operand(lit));
    536   __ Push(result_register());
    537 }
    538 
    539 
    540 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
    541   codegen()->PrepareForBailoutBeforeSplit(condition(),
    542                                           true,
    543                                           true_label_,
    544                                           false_label_);
    545   ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
    546   if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    547     if (false_label_ != fall_through_) __ B(false_label_);
    548   } else if (lit->IsTrue() || lit->IsJSObject()) {
    549     if (true_label_ != fall_through_) __ B(true_label_);
    550   } else if (lit->IsString()) {
    551     if (String::cast(*lit)->length() == 0) {
    552       if (false_label_ != fall_through_) __ B(false_label_);
    553     } else {
    554       if (true_label_ != fall_through_) __ B(true_label_);
    555     }
    556   } else if (lit->IsSmi()) {
    557     if (Smi::cast(*lit)->value() == 0) {
    558       if (false_label_ != fall_through_) __ B(false_label_);
    559     } else {
    560       if (true_label_ != fall_through_) __ B(true_label_);
    561     }
    562   } else {
    563     // For simplicity we always test the accumulator register.
    564     __ Mov(result_register(), Operand(lit));
    565     codegen()->DoTest(this);
    566   }
    567 }
    568 
    569 
    570 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
    571                                                    Register reg) const {
    572   ASSERT(count > 0);
    573   __ Drop(count);
    574 }
    575 
    576 
    577 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    578     int count,
    579     Register reg) const {
    580   ASSERT(count > 0);
    581   __ Drop(count);
    582   __ Move(result_register(), reg);
    583 }
    584 
    585 
    586 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
    587                                                        Register reg) const {
    588   ASSERT(count > 0);
    589   if (count > 1) __ Drop(count - 1);
    590   __ Poke(reg, 0);
    591 }
    592 
    593 
    594 void FullCodeGenerator::TestContext::DropAndPlug(int count,
    595                                                  Register reg) const {
    596   ASSERT(count > 0);
    597   // For simplicity we always test the accumulator register.
    598   __ Drop(count);
    599   __ Mov(result_register(), reg);
    600   codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
    601   codegen()->DoTest(this);
    602 }
    603 
    604 
    605 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
    606                                             Label* materialize_false) const {
    607   ASSERT(materialize_true == materialize_false);
    608   __ Bind(materialize_true);
    609 }
    610 
    611 
    612 void FullCodeGenerator::AccumulatorValueContext::Plug(
    613     Label* materialize_true,
    614     Label* materialize_false) const {
    615   Label done;
    616   __ Bind(materialize_true);
    617   __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
    618   __ B(&done);
    619   __ Bind(materialize_false);
    620   __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
    621   __ Bind(&done);
    622 }
    623 
    624 
    625 void FullCodeGenerator::StackValueContext::Plug(
    626     Label* materialize_true,
    627     Label* materialize_false) const {
    628   Label done;
    629   __ Bind(materialize_true);
    630   __ LoadRoot(x10, Heap::kTrueValueRootIndex);
    631   __ B(&done);
    632   __ Bind(materialize_false);
    633   __ LoadRoot(x10, Heap::kFalseValueRootIndex);
    634   __ Bind(&done);
    635   __ Push(x10);
    636 }
    637 
    638 
    639 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
    640                                           Label* materialize_false) const {
    641   ASSERT(materialize_true == true_label_);
    642   ASSERT(materialize_false == false_label_);
    643 }
    644 
    645 
    646 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
    647 }
    648 
    649 
    650 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
    651   Heap::RootListIndex value_root_index =
    652       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
    653   __ LoadRoot(result_register(), value_root_index);
    654 }
    655 
    656 
    657 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
    658   Heap::RootListIndex value_root_index =
    659       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
    660   __ LoadRoot(x10, value_root_index);
    661   __ Push(x10);
    662 }
    663 
    664 
    665 void FullCodeGenerator::TestContext::Plug(bool flag) const {
    666   codegen()->PrepareForBailoutBeforeSplit(condition(),
    667                                           true,
    668                                           true_label_,
    669                                           false_label_);
    670   if (flag) {
    671     if (true_label_ != fall_through_) {
    672       __ B(true_label_);
    673     }
    674   } else {
    675     if (false_label_ != fall_through_) {
    676       __ B(false_label_);
    677     }
    678   }
    679 }
    680 
    681 
    682 void FullCodeGenerator::DoTest(Expression* condition,
    683                                Label* if_true,
    684                                Label* if_false,
    685                                Label* fall_through) {
    686   Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
    687   CallIC(ic, condition->test_id());
    688   __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
    689 }
    690 
    691 
    692 // If (cond), branch to if_true.
    693 // If (!cond), branch to if_false.
    694 // fall_through is used as an optimization in cases where only one branch
    695 // instruction is necessary.
    696 void FullCodeGenerator::Split(Condition cond,
    697                               Label* if_true,
    698                               Label* if_false,
    699                               Label* fall_through) {
    700   if (if_false == fall_through) {
    701     __ B(cond, if_true);
    702   } else if (if_true == fall_through) {
    703     ASSERT(if_false != fall_through);
    704     __ B(NegateCondition(cond), if_false);
    705   } else {
    706     __ B(cond, if_true);
    707     __ B(if_false);
    708   }
    709 }
    710 
    711 
    712 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
    713   // Offset is negative because higher indexes are at lower addresses.
    714   int offset = -var->index() * kXRegSize;
    715   // Adjust by a (parameter or local) base offset.
    716   if (var->IsParameter()) {
    717     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
    718   } else {
    719     offset += JavaScriptFrameConstants::kLocal0Offset;
    720   }
    721   return MemOperand(fp, offset);
    722 }
    723 
    724 
    725 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
    726   ASSERT(var->IsContextSlot() || var->IsStackAllocated());
    727   if (var->IsContextSlot()) {
    728     int context_chain_length = scope()->ContextChainLength(var->scope());
    729     __ LoadContext(scratch, context_chain_length);
    730     return ContextMemOperand(scratch, var->index());
    731   } else {
    732     return StackOperand(var);
    733   }
    734 }
    735 
    736 
    737 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
    738   // Use destination as scratch.
    739   MemOperand location = VarOperand(var, dest);
    740   __ Ldr(dest, location);
    741 }
    742 
    743 
    744 void FullCodeGenerator::SetVar(Variable* var,
    745                                Register src,
    746                                Register scratch0,
    747                                Register scratch1) {
    748   ASSERT(var->IsContextSlot() || var->IsStackAllocated());
    749   ASSERT(!AreAliased(src, scratch0, scratch1));
    750   MemOperand location = VarOperand(var, scratch0);
    751   __ Str(src, location);
    752 
    753   // Emit the write barrier code if the location is in the heap.
    754   if (var->IsContextSlot()) {
    755     // scratch0 contains the correct context.
    756     __ RecordWriteContextSlot(scratch0,
    757                               location.offset(),
    758                               src,
    759                               scratch1,
    760                               kLRHasBeenSaved,
    761                               kDontSaveFPRegs);
    762   }
    763 }
    764 
    765 
    766 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
    767                                                      bool should_normalize,
    768                                                      Label* if_true,
    769                                                      Label* if_false) {
    770   // Only prepare for bailouts before splits if we're in a test
    771   // context. Otherwise, we let the Visit function deal with the
    772   // preparation to avoid preparing with the same AST id twice.
    773   if (!context()->IsTest() || !info_->IsOptimizable()) return;
    774 
    775   // TODO(all): Investigate to see if there is something to work on here.
    776   Label skip;
    777   if (should_normalize) {
    778     __ B(&skip);
    779   }
    780   PrepareForBailout(expr, TOS_REG);
    781   if (should_normalize) {
    782     __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    783     Split(eq, if_true, if_false, NULL);
    784     __ Bind(&skip);
    785   }
    786 }
    787 
    788 
    789 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
    790   // The variable in the declaration always resides in the current function
    791   // context.
    792   ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
    793   if (generate_debug_code_) {
    794     // Check that we're not inside a with or catch context.
    795     __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
    796     __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    797     __ Check(ne, kDeclarationInWithContext);
    798     __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    799     __ Check(ne, kDeclarationInCatchContext);
    800   }
    801 }
    802 
    803 
    804 void FullCodeGenerator::VisitVariableDeclaration(
    805     VariableDeclaration* declaration) {
    806   // If it was not possible to allocate the variable at compile time, we
    807   // need to "declare" it at runtime to make sure it actually exists in the
    808   // local context.
    809   VariableProxy* proxy = declaration->proxy();
    810   VariableMode mode = declaration->mode();
    811   Variable* variable = proxy->var();
    812   bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
    813 
    814   switch (variable->location()) {
    815     case Variable::UNALLOCATED:
    816       globals_->Add(variable->name(), zone());
    817       globals_->Add(variable->binding_needs_init()
    818                         ? isolate()->factory()->the_hole_value()
    819                         : isolate()->factory()->undefined_value(),
    820                     zone());
    821       break;
    822 
    823     case Variable::PARAMETER:
    824     case Variable::LOCAL:
    825       if (hole_init) {
    826         Comment cmnt(masm_, "[ VariableDeclaration");
    827         __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
    828         __ Str(x10, StackOperand(variable));
    829       }
    830       break;
    831 
    832     case Variable::CONTEXT:
    833       if (hole_init) {
    834         Comment cmnt(masm_, "[ VariableDeclaration");
    835         EmitDebugCheckDeclarationContext(variable);
    836         __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
    837         __ Str(x10, ContextMemOperand(cp, variable->index()));
    838         // No write barrier since the_hole_value is in old space.
    839         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
    840       }
    841       break;
    842 
    843     case Variable::LOOKUP: {
    844       Comment cmnt(masm_, "[ VariableDeclaration");
    845       __ Mov(x2, Operand(variable->name()));
    846       // Declaration nodes are always introduced in one of four modes.
    847       ASSERT(IsDeclaredVariableMode(mode));
    848       PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
    849                                                               : NONE;
    850       __ Mov(x1, Smi::FromInt(attr));
    851       // Push initial value, if any.
    852       // Note: For variables we must not push an initial value (such as
    853       // 'undefined') because we may have a (legal) redeclaration and we
    854       // must not destroy the current value.
    855       if (hole_init) {
    856         __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
    857         __ Push(cp, x2, x1, x0);
    858       } else {
    859         // Pushing 0 (xzr) indicates no initial value.
    860         __ Push(cp, x2, x1, xzr);
    861       }
    862       __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
    863       break;
    864     }
    865   }
    866 }
    867 
    868 
    869 void FullCodeGenerator::VisitFunctionDeclaration(
    870     FunctionDeclaration* declaration) {
    871   VariableProxy* proxy = declaration->proxy();
    872   Variable* variable = proxy->var();
    873   switch (variable->location()) {
    874     case Variable::UNALLOCATED: {
    875       globals_->Add(variable->name(), zone());
    876       Handle<SharedFunctionInfo> function =
    877           Compiler::BuildFunctionInfo(declaration->fun(), script());
    878       // Check for stack overflow exception.
    879       if (function.is_null()) return SetStackOverflow();
    880       globals_->Add(function, zone());
    881       break;
    882     }
    883 
    884     case Variable::PARAMETER:
    885     case Variable::LOCAL: {
    886       Comment cmnt(masm_, "[ Function Declaration");
    887       VisitForAccumulatorValue(declaration->fun());
    888       __ Str(result_register(), StackOperand(variable));
    889       break;
    890     }
    891 
    892     case Variable::CONTEXT: {
    893       Comment cmnt(masm_, "[ Function Declaration");
    894       EmitDebugCheckDeclarationContext(variable);
    895       VisitForAccumulatorValue(declaration->fun());
    896       __ Str(result_register(), ContextMemOperand(cp, variable->index()));
    897       int offset = Context::SlotOffset(variable->index());
    898       // We know that we have written a function, which is not a smi.
    899       __ RecordWriteContextSlot(cp,
    900                                 offset,
    901                                 result_register(),
    902                                 x2,
    903                                 kLRHasBeenSaved,
    904                                 kDontSaveFPRegs,
    905                                 EMIT_REMEMBERED_SET,
    906                                 OMIT_SMI_CHECK);
    907       PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
    908       break;
    909     }
    910 
    911     case Variable::LOOKUP: {
    912       Comment cmnt(masm_, "[ Function Declaration");
    913       __ Mov(x2, Operand(variable->name()));
    914       __ Mov(x1, Smi::FromInt(NONE));
    915       __ Push(cp, x2, x1);
    916       // Push initial value for function declaration.
    917       VisitForStackValue(declaration->fun());
    918       __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
    919       break;
    920     }
    921   }
    922 }
    923 
    924 
    925 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
    926   Variable* variable = declaration->proxy()->var();
    927   ASSERT(variable->location() == Variable::CONTEXT);
    928   ASSERT(variable->interface()->IsFrozen());
    929 
    930   Comment cmnt(masm_, "[ ModuleDeclaration");
    931   EmitDebugCheckDeclarationContext(variable);
    932 
    933   // Load instance object.
    934   __ LoadContext(x1, scope_->ContextChainLength(scope_->GlobalScope()));
    935   __ Ldr(x1, ContextMemOperand(x1, variable->interface()->Index()));
    936   __ Ldr(x1, ContextMemOperand(x1, Context::EXTENSION_INDEX));
    937 
    938   // Assign it.
    939   __ Str(x1, ContextMemOperand(cp, variable->index()));
    940   // We know that we have written a module, which is not a smi.
    941   __ RecordWriteContextSlot(cp,
    942                             Context::SlotOffset(variable->index()),
    943                             x1,
    944                             x3,
    945                             kLRHasBeenSaved,
    946                             kDontSaveFPRegs,
    947                             EMIT_REMEMBERED_SET,
    948                             OMIT_SMI_CHECK);
    949   PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
    950 
     951   // Traverse into body.
    952   Visit(declaration->module());
    953 }
    954 
    955 
    956 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
    957   VariableProxy* proxy = declaration->proxy();
    958   Variable* variable = proxy->var();
    959   switch (variable->location()) {
    960     case Variable::UNALLOCATED:
    961       // TODO(rossberg)
    962       break;
    963 
    964     case Variable::CONTEXT: {
    965       Comment cmnt(masm_, "[ ImportDeclaration");
    966       EmitDebugCheckDeclarationContext(variable);
    967       // TODO(rossberg)
    968       break;
    969     }
    970 
    971     case Variable::PARAMETER:
    972     case Variable::LOCAL:
    973     case Variable::LOOKUP:
    974       UNREACHABLE();
    975   }
    976 }
    977 
    978 
    979 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
    980   // TODO(rossberg)
    981 }
    982 
    983 
    984 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
    985   // Call the runtime to declare the globals.
    986   __ Mov(x11, Operand(pairs));
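           // A zero flags value is Smi zero, so xzr can be pushed directly; otherwise
           // the Smi is materialized in x10 below.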
    987   Register flags = xzr;
    988   if (Smi::FromInt(DeclareGlobalsFlags())) {
    989     flags = x10;
     990     __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
    991   }
    992   __ Push(cp, x11, flags);
    993   __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
    994   // Return value is ignored.
    995 }
    996 
    997 
    998 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
    999   // Call the runtime to declare the modules.
   1000   __ Push(descriptions);
   1001   __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
   1002   // Return value is ignored.
   1003 }
   1004 
   1005 
   1006 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
   1007   ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
   1008   Comment cmnt(masm_, "[ SwitchStatement");
   1009   Breakable nested_statement(this, stmt);
   1010   SetStatementPosition(stmt);
   1011 
   1012   // Keep the switch value on the stack until a case matches.
   1013   VisitForStackValue(stmt->tag());
   1014   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
   1015 
   1016   ZoneList<CaseClause*>* clauses = stmt->cases();
   1017   CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
   1018 
   1019   Label next_test;  // Recycled for each test.
   1020   // Compile all the tests with branches to their bodies.
   1021   for (int i = 0; i < clauses->length(); i++) {
   1022     CaseClause* clause = clauses->at(i);
   1023     clause->body_target()->Unuse();
   1024 
   1025     // The default is not a test, but remember it as final fall through.
   1026     if (clause->is_default()) {
   1027       default_clause = clause;
   1028       continue;
   1029     }
   1030 
   1031     Comment cmnt(masm_, "[ Case comparison");
   1032     __ Bind(&next_test);
   1033     next_test.Unuse();
   1034 
   1035     // Compile the label expression.
   1036     VisitForAccumulatorValue(clause->label());
   1037 
   1038     // Perform the comparison as if via '==='.
   1039     __ Peek(x1, 0);   // Switch value.
   1040 
   1041     JumpPatchSite patch_site(masm_);
   1042     if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
   1043       Label slow_case;
   1044       patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
   1045       __ Cmp(x1, x0);
   1046       __ B(ne, &next_test);
   1047       __ Drop(1);  // Switch value is no longer needed.
   1048       __ B(clause->body_target());
   1049       __ Bind(&slow_case);
   1050     }
   1051 
   1052     // Record position before stub call for type feedback.
   1053     SetSourcePosition(clause->position());
   1054     Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
   1055     CallIC(ic, clause->CompareId());
   1056     patch_site.EmitPatchInfo();
   1057 
   1058     Label skip;
   1059     __ B(&skip);
   1060     PrepareForBailout(clause, TOS_REG);
   1061     __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
   1062     __ Drop(1);
   1063     __ B(clause->body_target());
   1064     __ Bind(&skip);
   1065 
   1066     __ Cbnz(x0, &next_test);
   1067     __ Drop(1);  // Switch value is no longer needed.
   1068     __ B(clause->body_target());
   1069   }
   1070 
   1071   // Discard the test value and jump to the default if present, otherwise to
   1072   // the end of the statement.
   1073   __ Bind(&next_test);
   1074   __ Drop(1);  // Switch value is no longer needed.
   1075   if (default_clause == NULL) {
   1076     __ B(nested_statement.break_label());
   1077   } else {
   1078     __ B(default_clause->body_target());
   1079   }
   1080 
   1081   // Compile all the case bodies.
   1082   for (int i = 0; i < clauses->length(); i++) {
   1083     Comment cmnt(masm_, "[ Case body");
   1084     CaseClause* clause = clauses->at(i);
   1085     __ Bind(clause->body_target());
   1086     PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
   1087     VisitStatements(clause->statements());
   1088   }
   1089 
   1090   __ Bind(nested_statement.break_label());
   1091   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
   1092 }
   1093 
   1094 
   1095 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   1096   ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
   1097   Comment cmnt(masm_, "[ ForInStatement");
   1098   int slot = stmt->ForInFeedbackSlot();
   1099   // TODO(all): This visitor probably needs better comments and a revisit.
   1100   SetStatementPosition(stmt);
   1101 
   1102   Label loop, exit;
   1103   ForIn loop_statement(this, stmt);
   1104   increment_loop_depth();
   1105 
   1106   // Get the object to enumerate over. If the object is null or undefined, skip
   1107   // over the loop.  See ECMA-262 version 5, section 12.6.4.
   1108   VisitForAccumulatorValue(stmt->enumerable());
   1109   __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
   1110   Register null_value = x15;
   1111   __ LoadRoot(null_value, Heap::kNullValueRootIndex);
   1112   __ Cmp(x0, null_value);
   1113   __ B(eq, &exit);
   1114 
   1115   PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
   1116 
   1117   // Convert the object to a JS object.
   1118   Label convert, done_convert;
   1119   __ JumpIfSmi(x0, &convert);
   1120   __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
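           // Instance types at or above FIRST_SPEC_OBJECT_TYPE need no conversion;
           // everything else is converted with TO_OBJECT below.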
   1121   __ Bind(&convert);
   1122   __ Push(x0);
   1123   __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
   1124   __ Bind(&done_convert);
   1125   __ Push(x0);
   1126 
   1127   // Check for proxies.
   1128   Label call_runtime;
   1129   STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
   1130   __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);
   1131 
   1132   // Check cache validity in generated code. This is a fast case for
   1133   // the JSObject::IsSimpleEnum cache validity checks. If we cannot
   1134   // guarantee cache validity, call the runtime system to check cache
   1135   // validity or get the property names in a fixed array.
   1136   __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);
   1137 
   1138   // The enum cache is valid.  Load the map of the object being
   1139   // iterated over and use the cache for the iteration.
   1140   Label use_cache;
   1141   __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
   1142   __ B(&use_cache);
   1143 
   1144   // Get the set of properties to enumerate.
   1145   __ Bind(&call_runtime);
   1146   __ Push(x0);  // Duplicate the enumerable object on the stack.
   1147   __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
   1148 
   1149   // If we got a map from the runtime call, we can do a fast
   1150   // modification check. Otherwise, we got a fixed array, and we have
   1151   // to do a slow check.
   1152   Label fixed_array, no_descriptors;
   1153   __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
   1154   __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);
   1155 
   1156   // We got a map in register x0. Get the enumeration cache from it.
   1157   __ Bind(&use_cache);
   1158 
   1159   __ EnumLengthUntagged(x1, x0);
   1160   __ Cbz(x1, &no_descriptors);
   1161 
   1162   __ LoadInstanceDescriptors(x0, x2);
   1163   __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
   1164   __ Ldr(x2,
   1165          FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));
   1166 
   1167   // Set up the four remaining stack slots.
   1168   __ Push(x0, x2);              // Map, enumeration cache.
   1169   __ SmiTagAndPush(x1, xzr);    // Enum cache length, zero (both as smis).
   1170   __ B(&loop);
   1171 
   1172   __ Bind(&no_descriptors);
   1173   __ Drop(1);
   1174   __ B(&exit);
   1175 
   1176   // We got a fixed array in register x0. Iterate through that.
   1177   __ Bind(&fixed_array);
   1178 
   1179   __ LoadObject(x1, FeedbackVector());
   1180   __ Mov(x10, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
   1181   __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot)));
   1182 
   1183   __ Mov(x1, Smi::FromInt(1));  // Smi indicates slow check.
   1184   __ Peek(x10, 0);  // Get enumerated object.
   1185   STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
   1186   // TODO(all): similar check was done already. Can we avoid it here?
   1187   __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
   1188   ASSERT(Smi::FromInt(0) == 0);
   1189   __ CzeroX(x1, le);  // Zero indicates proxy.
   1190   __ Push(x1, x0);  // Smi and array
   1191   __ Ldr(x1, FieldMemOperand(x0, FixedArray::kLengthOffset));
   1192   __ Push(x1, xzr);  // Fixed array length (as smi) and initial index.
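           // Stack layout inside the loop, from the top: current index, array length,
           // the fixed array (or enum cache), the smi flag (or map), and the
           // enumerable object itself. All five slots are dropped at the break label.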
   1193 
   1194   // Generate code for doing the condition check.
   1195   PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
   1196   __ Bind(&loop);
   1197   // Load the current count to x0, load the length to x1.
   1198   __ PeekPair(x0, x1, 0);
   1199   __ Cmp(x0, x1);  // Compare to the array length.
   1200   __ B(hs, loop_statement.break_label());
   1201 
    1202   // Get the current entry of the array into register x3.
   1203   __ Peek(x10, 2 * kXRegSize);
   1204   __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
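           // UntagSmiAndScale untags the smi index and scales it to a byte offset in
           // a single operand.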
   1205   __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));
   1206 
   1207   // Get the expected map from the stack or a smi in the
    1208   // permanent slow case into register x2.
   1209   __ Peek(x2, 3 * kXRegSize);
   1210 
   1211   // Check if the expected map still matches that of the enumerable.
   1212   // If not, we may have to filter the key.
   1213   Label update_each;
   1214   __ Peek(x1, 4 * kXRegSize);
   1215   __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
   1216   __ Cmp(x11, x2);
   1217   __ B(eq, &update_each);
   1218 
   1219   // For proxies, no filtering is done.
   1220   // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
   1221   STATIC_ASSERT(kSmiTag == 0);
   1222   __ Cbz(x2, &update_each);
   1223 
   1224   // Convert the entry to a string or (smi) 0 if it isn't a property
   1225   // any more. If the property has been removed while iterating, we
   1226   // just skip it.
   1227   __ Push(x1, x3);
   1228   __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
   1229   __ Mov(x3, x0);
   1230   __ Cbz(x0, loop_statement.continue_label());
   1231 
   1232   // Update the 'each' property or variable from the possibly filtered
   1233   // entry in register x3.
   1234   __ Bind(&update_each);
   1235   __ Mov(result_register(), x3);
   1236   // Perform the assignment as if via '='.
   1237   { EffectContext context(this);
   1238     EmitAssignment(stmt->each());
   1239   }
   1240 
   1241   // Generate code for the body of the loop.
   1242   Visit(stmt->body());
   1243 
   1244   // Generate code for going to the next element by incrementing
   1245   // the index (smi) stored on top of the stack.
   1246   __ Bind(loop_statement.continue_label());
   1247   // TODO(all): We could use a callee saved register to avoid popping.
   1248   __ Pop(x0);
   1249   __ Add(x0, x0, Smi::FromInt(1));
   1250   __ Push(x0);
   1251 
   1252   EmitBackEdgeBookkeeping(stmt, &loop);
   1253   __ B(&loop);
   1254 
   1255   // Remove the pointers stored on the stack.
   1256   __ Bind(loop_statement.break_label());
   1257   __ Drop(5);
   1258 
   1259   // Exit and decrement the loop depth.
   1260   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
   1261   __ Bind(&exit);
   1262   decrement_loop_depth();
   1263 }
   1264 
   1265 
   1266 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
   1267   Comment cmnt(masm_, "[ ForOfStatement");
   1268   SetStatementPosition(stmt);
   1269 
   1270   Iteration loop_statement(this, stmt);
   1271   increment_loop_depth();
   1272 
   1273   // var iterable = subject
   1274   VisitForAccumulatorValue(stmt->assign_iterable());
   1275 
   1276   // As with for-in, skip the loop if the iterator is null or undefined.
   1277   Register iterator = x0;
   1278   __ JumpIfRoot(iterator, Heap::kUndefinedValueRootIndex,
   1279                 loop_statement.break_label());
   1280   __ JumpIfRoot(iterator, Heap::kNullValueRootIndex,
   1281                 loop_statement.break_label());
   1282 
   1283   // var iterator = iterable[Symbol.iterator]();
   1284   VisitForEffect(stmt->assign_iterator());
   1285 
   1286   // Loop entry.
   1287   __ Bind(loop_statement.continue_label());
   1288 
   1289   // result = iterator.next()
   1290   VisitForEffect(stmt->next_result());
   1291 
   1292   // if (result.done) break;
   1293   Label result_not_done;
   1294   VisitForControl(stmt->result_done(),
   1295                   loop_statement.break_label(),
   1296                   &result_not_done,
   1297                   &result_not_done);
   1298   __ Bind(&result_not_done);
   1299 
   1300   // each = result.value
   1301   VisitForEffect(stmt->assign_each());
   1302 
   1303   // Generate code for the body of the loop.
   1304   Visit(stmt->body());
   1305 
   1306   // Check stack before looping.
   1307   PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
   1308   EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
   1309   __ B(loop_statement.continue_label());
   1310 
   1311   // Exit and decrement the loop depth.
   1312   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
   1313   __ Bind(loop_statement.break_label());
   1314   decrement_loop_depth();
   1315 }
   1316 
   1317 
   1318 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
   1319                                        bool pretenure) {
   1320   // Use the fast case closure allocation code that allocates in new space for
   1321   // nested functions that don't need literals cloning. If we're running with
   1322   // the --always-opt or the --prepare-always-opt flag, we need to use the
   1323   // runtime function so that the new function we are creating here gets a
   1324   // chance to have its code optimized and doesn't just get a copy of the
   1325   // existing unoptimized code.
   1326   if (!FLAG_always_opt &&
   1327       !FLAG_prepare_always_opt &&
   1328       !pretenure &&
   1329       scope()->is_function_scope() &&
   1330       info->num_literals() == 0) {
   1331     FastNewClosureStub stub(isolate(),
   1332                             info->strict_mode(),
   1333                             info->is_generator());
   1334     __ Mov(x2, Operand(info));
   1335     __ CallStub(&stub);
   1336   } else {
   1337     __ Mov(x11, Operand(info));
   1338     __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
   1339                                : Heap::kFalseValueRootIndex);
   1340     __ Push(cp, x11, x10);
   1341     __ CallRuntime(Runtime::kHiddenNewClosure, 3);
   1342   }
   1343   context()->Plug(x0);
   1344 }
   1345 
   1346 
   1347 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
   1348   Comment cmnt(masm_, "[ VariableProxy");
   1349   EmitVariableLoad(expr);
   1350 }
   1351 
   1352 
   1353 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
   1354                                                       TypeofState typeof_state,
   1355                                                       Label* slow) {
   1356   Register current = cp;
   1357   Register next = x10;
   1358   Register temp = x11;
   1359 
   1360   Scope* s = scope();
   1361   while (s != NULL) {
   1362     if (s->num_heap_slots() > 0) {
   1363       if (s->calls_sloppy_eval()) {
   1364         // Check that extension is NULL.
   1365         __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
   1366         __ Cbnz(temp, slow);
   1367       }
   1368       // Load next context in chain.
   1369       __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
   1370       // Walk the rest of the chain without clobbering cp.
   1371       current = next;
   1372     }
   1373     // If no outer scope calls eval, we do not need to check more
   1374     // context extensions.
   1375     if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
   1376     s = s->outer_scope();
   1377   }
   1378 
   1379   if (s->is_eval_scope()) {
   1380     Label loop, fast;
   1381     __ Mov(next, current);
   1382 
   1383     __ Bind(&loop);
   1384     // Terminate at native context.
   1385     __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
   1386     __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
   1387     // Check that extension is NULL.
   1388     __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
   1389     __ Cbnz(temp, slow);
   1390     // Load next context in chain.
   1391     __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
   1392     __ B(&loop);
   1393     __ Bind(&fast);
   1394   }
   1395 
   1396   __ Ldr(x0, GlobalObjectMemOperand());
   1397   __ Mov(x2, Operand(var->name()));
   1398   ContextualMode mode = (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL
   1399                                                         : CONTEXTUAL;
   1400   CallLoadIC(mode);
   1401 }
   1402 
   1403 
   1404 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
   1405                                                                 Label* slow) {
   1406   ASSERT(var->IsContextSlot());
   1407   Register context = cp;
   1408   Register next = x10;
   1409   Register temp = x11;
   1410 
   1411   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
   1412     if (s->num_heap_slots() > 0) {
   1413       if (s->calls_sloppy_eval()) {
   1414         // Check that extension is NULL.
   1415         __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
   1416         __ Cbnz(temp, slow);
   1417       }
   1418       __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
   1419       // Walk the rest of the chain without clobbering cp.
   1420       context = next;
   1421     }
   1422   }
   1423   // Check that last extension is NULL.
   1424   __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
   1425   __ Cbnz(temp, slow);
   1426 
   1427   // This function is used only for loads, not stores, so it's safe to
    1428   // return a cp-based operand (the write barrier cannot be allowed to
   1429   // destroy the cp register).
   1430   return ContextMemOperand(context, var->index());
   1431 }
   1432 
   1433 
   1434 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
   1435                                                   TypeofState typeof_state,
   1436                                                   Label* slow,
   1437                                                   Label* done) {
   1438   // Generate fast-case code for variables that might be shadowed by
   1439   // eval-introduced variables.  Eval is used a lot without
   1440   // introducing variables.  In those cases, we do not want to
   1441   // perform a runtime call for all variables in the scope
   1442   // containing the eval.
   1443   if (var->mode() == DYNAMIC_GLOBAL) {
   1444     EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
   1445     __ B(done);
   1446   } else if (var->mode() == DYNAMIC_LOCAL) {
   1447     Variable* local = var->local_if_not_shadowed();
   1448     __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
   1449     if (local->mode() == LET || local->mode() == CONST ||
   1450         local->mode() == CONST_LEGACY) {
   1451       __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
   1452       if (local->mode() == CONST_LEGACY) {
   1453         __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
   1454       } else {  // LET || CONST
   1455         __ Mov(x0, Operand(var->name()));
   1456         __ Push(x0);
   1457         __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
   1458       }
   1459     }
   1460     __ B(done);
   1461   }
   1462 }
   1463 
   1464 
   1465 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
   1466   // Record position before possible IC call.
   1467   SetSourcePosition(proxy->position());
   1468   Variable* var = proxy->var();
   1469 
   1470   // Three cases: global variables, lookup variables, and all other types of
   1471   // variables.
   1472   switch (var->location()) {
   1473     case Variable::UNALLOCATED: {
   1474       Comment cmnt(masm_, "Global variable");
   1475       // Use inline caching. Variable name is passed in x2 and the global
   1476       // object (receiver) in x0.
   1477       __ Ldr(x0, GlobalObjectMemOperand());
   1478       __ Mov(x2, Operand(var->name()));
   1479       CallLoadIC(CONTEXTUAL);
   1480       context()->Plug(x0);
   1481       break;
   1482     }
   1483 
   1484     case Variable::PARAMETER:
   1485     case Variable::LOCAL:
   1486     case Variable::CONTEXT: {
   1487       Comment cmnt(masm_, var->IsContextSlot()
   1488                               ? "Context variable"
   1489                               : "Stack variable");
   1490       if (var->binding_needs_init()) {
   1491         // var->scope() may be NULL when the proxy is located in eval code and
   1492         // refers to a potential outside binding. Currently those bindings are
   1493         // always looked up dynamically, i.e. in that case
    1494         //     var->location() == LOOKUP
   1495         // always holds.
   1496         ASSERT(var->scope() != NULL);
   1497 
   1498         // Check if the binding really needs an initialization check. The check
   1499         // can be skipped in the following situation: we have a LET or CONST
   1500         // binding in harmony mode, both the Variable and the VariableProxy have
   1501         // the same declaration scope (i.e. they are both in global code, in the
   1502         // same function or in the same eval code) and the VariableProxy is in
   1503         // the source physically located after the initializer of the variable.
   1504         //
   1505         // We cannot skip any initialization checks for CONST in non-harmony
   1506         // mode because const variables may be declared but never initialized:
   1507         //   if (false) { const x; }; var y = x;
   1508         //
   1509         // The condition on the declaration scopes is a conservative check for
   1510         // nested functions that access a binding and are called before the
   1511         // binding is initialized:
   1512         //   function() { f(); let x = 1; function f() { x = 2; } }
   1513         //
   1514         bool skip_init_check;
   1515         if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
   1516           skip_init_check = false;
   1517         } else {
   1518           // Check that we always have valid source position.
   1519           ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
   1520           ASSERT(proxy->position() != RelocInfo::kNoPosition);
   1521           skip_init_check = var->mode() != CONST_LEGACY &&
   1522               var->initializer_position() < proxy->position();
   1523         }
   1524 
   1525         if (!skip_init_check) {
   1526           // Let and const need a read barrier.
   1527           GetVar(x0, var);
   1528           Label done;
   1529           __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
   1530           if (var->mode() == LET || var->mode() == CONST) {
   1531             // Throw a reference error when using an uninitialized let/const
   1532             // binding in harmony mode.
   1533             __ Mov(x0, Operand(var->name()));
   1534             __ Push(x0);
   1535             __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
   1536             __ Bind(&done);
   1537           } else {
    1538             // Uninitialized const bindings outside of harmony mode are unholed.
   1539             ASSERT(var->mode() == CONST_LEGACY);
   1540             __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
   1541             __ Bind(&done);
   1542           }
   1543           context()->Plug(x0);
   1544           break;
   1545         }
   1546       }
   1547       context()->Plug(var);
   1548       break;
   1549     }
   1550 
   1551     case Variable::LOOKUP: {
   1552       Label done, slow;
   1553       // Generate code for loading from variables potentially shadowed by
   1554       // eval-introduced variables.
   1555       EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
   1556       __ Bind(&slow);
   1557       Comment cmnt(masm_, "Lookup variable");
   1558       __ Mov(x1, Operand(var->name()));
   1559       __ Push(cp, x1);  // Context and name.
   1560       __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
   1561       __ Bind(&done);
   1562       context()->Plug(x0);
   1563       break;
   1564     }
   1565   }
   1566 }
   1567 
   1568 
   1569 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
   1570   Comment cmnt(masm_, "[ RegExpLiteral");
   1571   Label materialized;
   1572   // Registers will be used as follows:
   1573   // x5 = materialized value (RegExp literal)
   1574   // x4 = JS function, literals array
   1575   // x3 = literal index
   1576   // x2 = RegExp pattern
   1577   // x1 = RegExp flags
   1578   // x0 = RegExp literal clone
   1579   __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1580   __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
   1581   int literal_offset =
   1582       FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
   1583   __ Ldr(x5, FieldMemOperand(x4, literal_offset));
   1584   __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);
   1585 
   1586   // Create regexp literal using runtime function.
   1587   // Result will be in x0.
   1588   __ Mov(x3, Smi::FromInt(expr->literal_index()));
   1589   __ Mov(x2, Operand(expr->pattern()));
   1590   __ Mov(x1, Operand(expr->flags()));
   1591   __ Push(x4, x3, x2, x1);
   1592   __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
   1593   __ Mov(x5, x0);
   1594 
   1595   __ Bind(&materialized);
   1596   int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
   1597   Label allocated, runtime_allocate;
   1598   __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
   1599   __ B(&allocated);
   1600 
   1601   __ Bind(&runtime_allocate);
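           // Keep the materialized regexp (x5) live across the allocation call by
           // pushing it beneath the size argument; it is popped again below.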
   1602   __ Mov(x10, Smi::FromInt(size));
   1603   __ Push(x5, x10);
   1604   __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
   1605   __ Pop(x5);
   1606 
   1607   __ Bind(&allocated);
   1608   // After this, registers are used as follows:
   1609   // x0: Newly allocated regexp.
   1610   // x5: Materialized regexp.
   1611   // x10, x11, x12: temps.
   1612   __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
   1613   context()->Plug(x0);
   1614 }
   1615 
   1616 
   1617 void FullCodeGenerator::EmitAccessor(Expression* expression) {
   1618   if (expression == NULL) {
   1619     __ LoadRoot(x10, Heap::kNullValueRootIndex);
   1620     __ Push(x10);
   1621   } else {
   1622     VisitForStackValue(expression);
   1623   }
   1624 }
   1625 
   1626 
   1627 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   1628   Comment cmnt(masm_, "[ ObjectLiteral");
   1629 
   1630   expr->BuildConstantProperties(isolate());
   1631   Handle<FixedArray> constant_properties = expr->constant_properties();
    1632   __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1633   __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
   1634   __ Mov(x2, Smi::FromInt(expr->literal_index()));
   1635   __ Mov(x1, Operand(constant_properties));
   1636   int flags = expr->fast_elements()
   1637       ? ObjectLiteral::kFastElements
   1638       : ObjectLiteral::kNoFlags;
   1639   flags |= expr->has_function()
   1640       ? ObjectLiteral::kHasFunction
   1641       : ObjectLiteral::kNoFlags;
   1642   __ Mov(x0, Smi::FromInt(flags));
   1643   int properties_count = constant_properties->length() / 2;
   1644   const int max_cloned_properties =
   1645       FastCloneShallowObjectStub::kMaximumClonedProperties;
   1646   if (expr->may_store_doubles() || expr->depth() > 1 ||
   1647       masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
   1648       properties_count > max_cloned_properties) {
   1649     __ Push(x3, x2, x1, x0);
   1650     __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
   1651   } else {
   1652     FastCloneShallowObjectStub stub(isolate(), properties_count);
   1653     __ CallStub(&stub);
   1654   }
   1655 
   1656   // If result_saved is true the result is on top of the stack.  If
   1657   // result_saved is false the result is in x0.
   1658   bool result_saved = false;
   1659 
   1660   // Mark all computed expressions that are bound to a key that
   1661   // is shadowed by a later occurrence of the same key. For the
   1662   // marked expressions, no store code is emitted.
   1663   expr->CalculateEmitStore(zone());
   1664 
   1665   AccessorTable accessor_table(zone());
   1666   for (int i = 0; i < expr->properties()->length(); i++) {
   1667     ObjectLiteral::Property* property = expr->properties()->at(i);
   1668     if (property->IsCompileTimeValue()) continue;
   1669 
   1670     Literal* key = property->key();
   1671     Expression* value = property->value();
   1672     if (!result_saved) {
   1673       __ Push(x0);  // Save result on stack
   1674       result_saved = true;
   1675     }
   1676     switch (property->kind()) {
   1677       case ObjectLiteral::Property::CONSTANT:
   1678         UNREACHABLE();
   1679       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
   1680         ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
   1681         // Fall through.
   1682       case ObjectLiteral::Property::COMPUTED:
   1683         if (key->value()->IsInternalizedString()) {
   1684           if (property->emit_store()) {
   1685             VisitForAccumulatorValue(value);
   1686             __ Mov(x2, Operand(key->value()));
   1687             __ Peek(x1, 0);
   1688             CallStoreIC(key->LiteralFeedbackId());
   1689             PrepareForBailoutForId(key->id(), NO_REGISTERS);
   1690           } else {
   1691             VisitForEffect(value);
   1692           }
   1693           break;
   1694         }
   1695         if (property->emit_store()) {
   1696           // Duplicate receiver on stack.
   1697           __ Peek(x0, 0);
   1698           __ Push(x0);
   1699           VisitForStackValue(key);
   1700           VisitForStackValue(value);
   1701           __ Mov(x0, Smi::FromInt(NONE));  // PropertyAttributes
   1702           __ Push(x0);
   1703           __ CallRuntime(Runtime::kSetProperty, 4);
   1704         } else {
   1705           VisitForEffect(key);
   1706           VisitForEffect(value);
   1707         }
   1708         break;
   1709       case ObjectLiteral::Property::PROTOTYPE:
   1710         if (property->emit_store()) {
   1711           // Duplicate receiver on stack.
   1712           __ Peek(x0, 0);
   1713           __ Push(x0);
   1714           VisitForStackValue(value);
   1715           __ CallRuntime(Runtime::kSetPrototype, 2);
   1716         } else {
   1717           VisitForEffect(value);
   1718         }
   1719         break;
   1720       case ObjectLiteral::Property::GETTER:
   1721         accessor_table.lookup(key)->second->getter = value;
   1722         break;
   1723       case ObjectLiteral::Property::SETTER:
   1724         accessor_table.lookup(key)->second->setter = value;
   1725         break;
   1726     }
   1727   }
   1728 
   1729   // Emit code to define accessors, using only a single call to the runtime for
   1730   // each pair of corresponding getters and setters.
   1731   for (AccessorTable::Iterator it = accessor_table.begin();
   1732        it != accessor_table.end();
   1733        ++it) {
   1734       __ Peek(x10, 0);  // Duplicate receiver.
   1735       __ Push(x10);
   1736       VisitForStackValue(it->first);
   1737       EmitAccessor(it->second->getter);
   1738       EmitAccessor(it->second->setter);
   1739       __ Mov(x10, Smi::FromInt(NONE));
   1740       __ Push(x10);
   1741       __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
   1742   }
   1743 
   1744   if (expr->has_function()) {
   1745     ASSERT(result_saved);
   1746     __ Peek(x0, 0);
   1747     __ Push(x0);
   1748     __ CallRuntime(Runtime::kToFastProperties, 1);
   1749   }
   1750 
   1751   if (result_saved) {
   1752     context()->PlugTOS();
   1753   } else {
   1754     context()->Plug(x0);
   1755   }
   1756 }
   1757 
   1758 
   1759 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   1760   Comment cmnt(masm_, "[ ArrayLiteral");
   1761 
   1762   expr->BuildConstantElements(isolate());
   1763   int flags = (expr->depth() == 1) ? ArrayLiteral::kShallowElements
   1764                                    : ArrayLiteral::kNoFlags;
   1765 
   1766   ZoneList<Expression*>* subexprs = expr->values();
   1767   int length = subexprs->length();
   1768   Handle<FixedArray> constant_elements = expr->constant_elements();
   1769   ASSERT_EQ(2, constant_elements->length());
   1770   ElementsKind constant_elements_kind =
   1771       static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
   1772   bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
   1773   Handle<FixedArrayBase> constant_elements_values(
   1774       FixedArrayBase::cast(constant_elements->get(1)));
   1775 
   1776   AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
   1777   if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
   1778     // If the only customer of allocation sites is transitioning, then
   1779     // we can turn it off if we don't have anywhere else to transition to.
   1780     allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
   1781   }
   1782 
   1783   __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1784   __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
   1785   __ Mov(x2, Smi::FromInt(expr->literal_index()));
   1786   __ Mov(x1, Operand(constant_elements));
   1787   if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
   1788     __ Mov(x0, Smi::FromInt(flags));
   1789     __ Push(x3, x2, x1, x0);
   1790     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
   1791   } else {
   1792     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
   1793     __ CallStub(&stub);
   1794   }
   1795 
   1796   bool result_saved = false;  // Is the result saved to the stack?
   1797 
   1798   // Emit code to evaluate all the non-constant subexpressions and to store
   1799   // them into the newly cloned array.
   1800   for (int i = 0; i < length; i++) {
   1801     Expression* subexpr = subexprs->at(i);
   1802     // If the subexpression is a literal or a simple materialized literal it
   1803     // is already set in the cloned array.
   1804     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
   1805 
   1806     if (!result_saved) {
   1807       __ Push(x0);
   1808       __ Push(Smi::FromInt(expr->literal_index()));
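               // Keep the cloned array and its literal index on the stack while the
               // elements are initialized; the fast path below peeks the array copy.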
   1809       result_saved = true;
   1810     }
   1811     VisitForAccumulatorValue(subexpr);
   1812 
   1813     if (IsFastObjectElementsKind(constant_elements_kind)) {
   1814       int offset = FixedArray::kHeaderSize + (i * kPointerSize);
   1815       __ Peek(x6, kPointerSize);  // Copy of array literal.
   1816       __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
   1817       __ Str(result_register(), FieldMemOperand(x1, offset));
   1818       // Update the write barrier for the array store.
   1819       __ RecordWriteField(x1, offset, result_register(), x10,
   1820                           kLRHasBeenSaved, kDontSaveFPRegs,
   1821                           EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
   1822     } else {
   1823       __ Mov(x3, Smi::FromInt(i));
   1824       StoreArrayLiteralElementStub stub(isolate());
   1825       __ CallStub(&stub);
   1826     }
   1827 
   1828     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   1829   }
   1830 
   1831   if (result_saved) {
   1832     __ Drop(1);   // literal index
   1833     context()->PlugTOS();
   1834   } else {
   1835     context()->Plug(x0);
   1836   }
   1837 }
   1838 
   1839 
   1840 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
   1841   ASSERT(expr->target()->IsValidReferenceExpression());
   1842 
   1843   Comment cmnt(masm_, "[ Assignment");
   1844 
   1845   // Left-hand side can only be a property, a global or a (parameter or local)
   1846   // slot.
   1847   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
   1848   LhsKind assign_type = VARIABLE;
   1849   Property* property = expr->target()->AsProperty();
   1850   if (property != NULL) {
   1851     assign_type = (property->key()->IsPropertyName())
   1852         ? NAMED_PROPERTY
   1853         : KEYED_PROPERTY;
   1854   }
   1855 
   1856   // Evaluate LHS expression.
   1857   switch (assign_type) {
   1858     case VARIABLE:
   1859       // Nothing to do here.
   1860       break;
   1861     case NAMED_PROPERTY:
   1862       if (expr->is_compound()) {
   1863         // We need the receiver both on the stack and in the accumulator.
   1864         VisitForAccumulatorValue(property->obj());
   1865         __ Push(result_register());
   1866       } else {
   1867         VisitForStackValue(property->obj());
   1868       }
   1869       break;
   1870     case KEYED_PROPERTY:
   1871       if (expr->is_compound()) {
   1872         VisitForStackValue(property->obj());
   1873         VisitForAccumulatorValue(property->key());
   1874         __ Peek(x1, 0);
   1875         __ Push(x0);
   1876       } else {
   1877         VisitForStackValue(property->obj());
   1878         VisitForStackValue(property->key());
   1879       }
   1880       break;
   1881   }
   1882 
   1883   // For compound assignments we need another deoptimization point after the
   1884   // variable/property load.
   1885   if (expr->is_compound()) {
   1886     { AccumulatorValueContext context(this);
   1887       switch (assign_type) {
   1888         case VARIABLE:
   1889           EmitVariableLoad(expr->target()->AsVariableProxy());
   1890           PrepareForBailout(expr->target(), TOS_REG);
   1891           break;
   1892         case NAMED_PROPERTY:
   1893           EmitNamedPropertyLoad(property);
   1894           PrepareForBailoutForId(property->LoadId(), TOS_REG);
   1895           break;
   1896         case KEYED_PROPERTY:
   1897           EmitKeyedPropertyLoad(property);
   1898           PrepareForBailoutForId(property->LoadId(), TOS_REG);
   1899           break;
   1900       }
   1901     }
   1902 
   1903     Token::Value op = expr->binary_op();
   1904     __ Push(x0);  // Left operand goes on the stack.
   1905     VisitForAccumulatorValue(expr->value());
   1906 
   1907     OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
   1908         ? OVERWRITE_RIGHT
   1909         : NO_OVERWRITE;
   1910     SetSourcePosition(expr->position() + 1);
   1911     AccumulatorValueContext context(this);
   1912     if (ShouldInlineSmiCase(op)) {
   1913       EmitInlineSmiBinaryOp(expr->binary_operation(),
   1914                             op,
   1915                             mode,
   1916                             expr->target(),
   1917                             expr->value());
   1918     } else {
   1919       EmitBinaryOp(expr->binary_operation(), op, mode);
   1920     }
   1921 
   1922     // Deoptimization point in case the binary operation may have side effects.
   1923     PrepareForBailout(expr->binary_operation(), TOS_REG);
   1924   } else {
   1925     VisitForAccumulatorValue(expr->value());
   1926   }
   1927 
   1928   // Record source position before possible IC call.
   1929   SetSourcePosition(expr->position());
   1930 
   1931   // Store the value.
   1932   switch (assign_type) {
   1933     case VARIABLE:
   1934       EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
   1935                              expr->op());
   1936       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   1937       context()->Plug(x0);
   1938       break;
   1939     case NAMED_PROPERTY:
   1940       EmitNamedPropertyAssignment(expr);
   1941       break;
   1942     case KEYED_PROPERTY:
   1943       EmitKeyedPropertyAssignment(expr);
   1944       break;
   1945   }
   1946 }
   1947 
   1948 
   1949 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   1950   SetSourcePosition(prop->position());
   1951   Literal* key = prop->key()->AsLiteral();
   1952   __ Mov(x2, Operand(key->value()));
    1953   // Call load IC. It has arguments receiver and property name in x0 and x2.
   1954   CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
   1955 }
   1956 
   1957 
   1958 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   1959   SetSourcePosition(prop->position());
    1960   // Call keyed load IC. It has arguments key and receiver in x0 and x1.
   1961   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
   1962   CallIC(ic, prop->PropertyFeedbackId());
   1963 }
   1964 
   1965 
   1966 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
   1967                                               Token::Value op,
   1968                                               OverwriteMode mode,
   1969                                               Expression* left_expr,
   1970                                               Expression* right_expr) {
   1971   Label done, both_smis, stub_call;
   1972 
   1973   // Get the arguments.
   1974   Register left = x1;
   1975   Register right = x0;
   1976   Register result = x0;
   1977   __ Pop(left);
   1978 
   1979   // Perform combined smi check on both operands.
   1980   __ Orr(x10, left, right);
   1981   JumpPatchSite patch_site(masm_);
   1982   patch_site.EmitJumpIfSmi(x10, &both_smis);
   1983 
   1984   __ Bind(&stub_call);
   1985   BinaryOpICStub stub(isolate(), op, mode);
   1986   {
   1987     Assembler::BlockPoolsScope scope(masm_);
   1988     CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
   1989     patch_site.EmitPatchInfo();
   1990   }
   1991   __ B(&done);
   1992 
   1993   __ Bind(&both_smis);
   1994   // Smi case. This code works in the same way as the smi-smi case in the type
   1995   // recording binary operation stub, see
   1996   // BinaryOpStub::GenerateSmiSmiOperation for comments.
   1997   // TODO(all): That doesn't exist any more. Where are the comments?
   1998   //
   1999   // The set of operations that needs to be supported here is controlled by
   2000   // FullCodeGenerator::ShouldInlineSmiCase().
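           // On arm64 a smi keeps its 32-bit payload in the upper half of the
           // register (kSmiShift is 32), so the Ubfx instructions below extract
           // the low five bits of the untagged shift amount.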
   2001   switch (op) {
   2002     case Token::SAR:
   2003       __ Ubfx(right, right, kSmiShift, 5);
   2004       __ Asr(result, left, right);
   2005       __ Bic(result, result, kSmiShiftMask);
   2006       break;
   2007     case Token::SHL:
   2008       __ Ubfx(right, right, kSmiShift, 5);
   2009       __ Lsl(result, left, right);
   2010       break;
   2011     case Token::SHR: {
   2012       Label right_not_zero;
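               // A shift amount of zero with a negative left operand would produce an
               // unsigned result outside the smi range, so fall back to the stub.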
   2013       __ Cbnz(right, &right_not_zero);
   2014       __ Tbnz(left, kXSignBit, &stub_call);
   2015       __ Bind(&right_not_zero);
   2016       __ Ubfx(right, right, kSmiShift, 5);
   2017       __ Lsr(result, left, right);
   2018       __ Bic(result, result, kSmiShiftMask);
   2019       break;
   2020     }
   2021     case Token::ADD:
   2022       __ Adds(x10, left, right);
   2023       __ B(vs, &stub_call);
   2024       __ Mov(result, x10);
   2025       break;
   2026     case Token::SUB:
   2027       __ Subs(x10, left, right);
   2028       __ B(vs, &stub_call);
   2029       __ Mov(result, x10);
   2030       break;
   2031     case Token::MUL: {
   2032       Label not_minus_zero, done;
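               // Both operands are tagged (payload shifted left by 32), so the high
               // 64 bits of their 128-bit product, which Smulh computes, are exactly
               // the untagged product.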
   2033       __ Smulh(x10, left, right);
   2034       __ Cbnz(x10, &not_minus_zero);
   2035       __ Eor(x11, left, right);
   2036       __ Tbnz(x11, kXSignBit, &stub_call);
   2037       STATIC_ASSERT(kSmiTag == 0);
   2038       __ Mov(result, x10);
   2039       __ B(&done);
   2040       __ Bind(&not_minus_zero);
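               // Retag only if the product fits in 32 bits: Cls counts the sign bits
               // below the top bit, and fewer than (kXRegSizeInBits - kSmiShift) of
               // them means the value overflows the smi range.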
   2041       __ Cls(x11, x10);
   2042       __ Cmp(x11, kXRegSizeInBits - kSmiShift);
   2043       __ B(lt, &stub_call);
   2044       __ SmiTag(result, x10);
   2045       __ Bind(&done);
   2046       break;
   2047     }
   2048     case Token::BIT_OR:
   2049       __ Orr(result, left, right);
   2050       break;
   2051     case Token::BIT_AND:
   2052       __ And(result, left, right);
   2053       break;
   2054     case Token::BIT_XOR:
   2055       __ Eor(result, left, right);
   2056       break;
   2057     default:
   2058       UNREACHABLE();
   2059   }
   2060 
   2061   __ Bind(&done);
   2062   context()->Plug(x0);
   2063 }
   2064 
   2065 
   2066 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
   2067                                      Token::Value op,
   2068                                      OverwriteMode mode) {
   2069   __ Pop(x1);
   2070   BinaryOpICStub stub(isolate(), op, mode);
   2071   JumpPatchSite patch_site(masm_);    // Unbound, signals no inlined smi code.
   2072   {
   2073     Assembler::BlockPoolsScope scope(masm_);
   2074     CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
   2075     patch_site.EmitPatchInfo();
   2076   }
   2077   context()->Plug(x0);
   2078 }
   2079 
   2080 
   2081 void FullCodeGenerator::EmitAssignment(Expression* expr) {
   2082   ASSERT(expr->IsValidReferenceExpression());
   2083 
   2084   // Left-hand side can only be a property, a global or a (parameter or local)
   2085   // slot.
   2086   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
   2087   LhsKind assign_type = VARIABLE;
   2088   Property* prop = expr->AsProperty();
   2089   if (prop != NULL) {
   2090     assign_type = (prop->key()->IsPropertyName())
   2091         ? NAMED_PROPERTY
   2092         : KEYED_PROPERTY;
   2093   }
   2094 
   2095   switch (assign_type) {
   2096     case VARIABLE: {
   2097       Variable* var = expr->AsVariableProxy()->var();
   2098       EffectContext context(this);
   2099       EmitVariableAssignment(var, Token::ASSIGN);
   2100       break;
   2101     }
   2102     case NAMED_PROPERTY: {
   2103       __ Push(x0);  // Preserve value.
   2104       VisitForAccumulatorValue(prop->obj());
   2105       // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
   2106       // this copy.
   2107       __ Mov(x1, x0);
   2108       __ Pop(x0);  // Restore value.
   2109       __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
   2110       CallStoreIC();
   2111       break;
   2112     }
   2113     case KEYED_PROPERTY: {
   2114       __ Push(x0);  // Preserve value.
   2115       VisitForStackValue(prop->obj());
   2116       VisitForAccumulatorValue(prop->key());
   2117       __ Mov(x1, x0);
   2118       __ Pop(x2, x0);
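               // The keyed store IC expects the value in x0, the key in x1 and the
               // receiver in x2.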
   2119       Handle<Code> ic = strict_mode() == SLOPPY
   2120           ? isolate()->builtins()->KeyedStoreIC_Initialize()
   2121           : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
   2122       CallIC(ic);
   2123       break;
   2124     }
   2125   }
   2126   context()->Plug(x0);
   2127 }
   2128 
   2129 
   2130 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
   2131     Variable* var, MemOperand location) {
   2132   __ Str(result_register(), location);
   2133   if (var->IsContextSlot()) {
   2134     // RecordWrite may destroy all its register arguments.
   2135     __ Mov(x10, result_register());
   2136     int offset = Context::SlotOffset(var->index());
   2137     __ RecordWriteContextSlot(
   2138         x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
   2139   }
   2140 }
   2141 
   2142 
   2143 void FullCodeGenerator::EmitCallStoreContextSlot(
   2144     Handle<String> name, StrictMode strict_mode) {
   2145   __ Mov(x11, Operand(name));
   2146   __ Mov(x10, Smi::FromInt(strict_mode));
   2147   // jssp[0]  : mode.
   2148   // jssp[8]  : name.
   2149   // jssp[16] : context.
   2150   // jssp[24] : value.
   2151   __ Push(x0, cp, x11, x10);
   2152   __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
   2153 }
   2154 
   2155 
   2156 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
   2157                                                Token::Value op) {
   2158   ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
   2159   if (var->IsUnallocated()) {
   2160     // Global var, const, or let.
   2161     __ Mov(x2, Operand(var->name()));
   2162     __ Ldr(x1, GlobalObjectMemOperand());
   2163     CallStoreIC();
   2164 
   2165   } else if (op == Token::INIT_CONST_LEGACY) {
   2166     // Const initializers need a write barrier.
   2167     ASSERT(!var->IsParameter());  // No const parameters.
   2168     if (var->IsLookupSlot()) {
   2169       __ Push(x0);
   2170       __ Mov(x0, Operand(var->name()));
   2171       __ Push(cp, x0);  // Context and name.
   2172       __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
   2173     } else {
   2174       ASSERT(var->IsStackLocal() || var->IsContextSlot());
   2175       Label skip;
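               // Only store if the slot still holds the hole: a legacy const that has
               // already been assigned keeps its first value.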
   2176       MemOperand location = VarOperand(var, x1);
   2177       __ Ldr(x10, location);
   2178       __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
   2179       EmitStoreToStackLocalOrContextSlot(var, location);
   2180       __ Bind(&skip);
   2181     }
   2182 
   2183   } else if (var->mode() == LET && op != Token::INIT_LET) {
   2184     // Non-initializing assignment to let variable needs a write barrier.
   2185     if (var->IsLookupSlot()) {
   2186       EmitCallStoreContextSlot(var->name(), strict_mode());
   2187     } else {
   2188       ASSERT(var->IsStackAllocated() || var->IsContextSlot());
   2189       Label assign;
   2190       MemOperand location = VarOperand(var, x1);
   2191       __ Ldr(x10, location);
   2192       __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
   2193       __ Mov(x10, Operand(var->name()));
   2194       __ Push(x10);
   2195       __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
   2196       // Perform the assignment.
   2197       __ Bind(&assign);
   2198       EmitStoreToStackLocalOrContextSlot(var, location);
   2199     }
   2200 
   2201   } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
   2202     // Assignment to var or initializing assignment to let/const
   2203     // in harmony mode.
   2204     if (var->IsLookupSlot()) {
   2205       EmitCallStoreContextSlot(var->name(), strict_mode());
   2206     } else {
   2207       ASSERT(var->IsStackAllocated() || var->IsContextSlot());
   2208       MemOperand location = VarOperand(var, x1);
   2209       if (FLAG_debug_code && op == Token::INIT_LET) {
   2210         __ Ldr(x10, location);
   2211         __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
   2212         __ Check(eq, kLetBindingReInitialization);
   2213       }
   2214       EmitStoreToStackLocalOrContextSlot(var, location);
   2215     }
   2216   }
   2217   // Non-initializing assignments to consts are ignored.
   2218 }
   2219 
   2220 
   2221 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
   2222   ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
   2223   // Assignment to a property, using a named store IC.
   2224   Property* prop = expr->target()->AsProperty();
   2225   ASSERT(prop != NULL);
   2226   ASSERT(prop->key()->IsLiteral());
   2227 
   2228   // Record source code position before IC call.
   2229   SetSourcePosition(expr->position());
   2230   __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
   2231   __ Pop(x1);
   2232 
   2233   CallStoreIC(expr->AssignmentFeedbackId());
   2234 
   2235   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   2236   context()->Plug(x0);
   2237 }
   2238 
   2239 
   2240 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   2241   ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
   2242   // Assignment to a property, using a keyed store IC.
   2243 
   2244   // Record source code position before IC call.
   2245   SetSourcePosition(expr->position());
   2246   // TODO(all): Could we pass this in registers rather than on the stack?
   2247   __ Pop(x1, x2);  // Key and object holding the property.
   2248 
   2249   Handle<Code> ic = strict_mode() == SLOPPY
   2250       ? isolate()->builtins()->KeyedStoreIC_Initialize()
   2251       : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
   2252   CallIC(ic, expr->AssignmentFeedbackId());
   2253 
   2254   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   2255   context()->Plug(x0);
   2256 }
   2257 
   2258 
   2259 void FullCodeGenerator::VisitProperty(Property* expr) {
   2260   Comment cmnt(masm_, "[ Property");
   2261   Expression* key = expr->key();
   2262 
   2263   if (key->IsPropertyName()) {
   2264     VisitForAccumulatorValue(expr->obj());
   2265     EmitNamedPropertyLoad(expr);
   2266     PrepareForBailoutForId(expr->LoadId(), TOS_REG);
   2267     context()->Plug(x0);
   2268   } else {
   2269     VisitForStackValue(expr->obj());
   2270     VisitForAccumulatorValue(expr->key());
   2271     __ Pop(x1);
   2272     EmitKeyedPropertyLoad(expr);
   2273     context()->Plug(x0);
   2274   }
   2275 }
   2276 
   2277 
   2278 void FullCodeGenerator::CallIC(Handle<Code> code,
   2279                                TypeFeedbackId ast_id) {
   2280   ic_total_count_++;
   2281   // All calls must have a predictable size in full-codegen code to ensure that
   2282   // the debugger can patch them correctly.
   2283   __ Call(code, RelocInfo::CODE_TARGET, ast_id);
   2284 }
   2285 
   2286 
   2287 // Code common for calls using the IC.
   2288 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
   2289   Expression* callee = expr->expression();
   2290 
   2291   CallIC::CallType call_type = callee->IsVariableProxy()
   2292       ? CallIC::FUNCTION
   2293       : CallIC::METHOD;
   2294 
   2295   // Get the target function.
   2296   if (call_type == CallIC::FUNCTION) {
   2297     { StackValueContext context(this);
   2298       EmitVariableLoad(callee->AsVariableProxy());
   2299       PrepareForBailout(callee, NO_REGISTERS);
   2300     }
   2301     // Push undefined as receiver. This is patched in the method prologue if it
   2302     // is a sloppy mode method.
   2303     __ Push(isolate()->factory()->undefined_value());
   2304   } else {
   2305     // Load the function from the receiver.
   2306     ASSERT(callee->IsProperty());
   2307     __ Peek(x0, 0);
   2308     EmitNamedPropertyLoad(callee->AsProperty());
   2309     PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
   2310     // Push the target function under the receiver.
   2311     __ Pop(x10);
   2312     __ Push(x0, x10);
   2313   }
   2314 
   2315   EmitCall(expr, call_type);
   2316 }
   2317 
   2318 
   2319 // Code common for calls using the IC.
   2320 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
   2321                                                 Expression* key) {
   2322   // Load the key.
   2323   VisitForAccumulatorValue(key);
   2324 
   2325   Expression* callee = expr->expression();
   2326 
   2327   // Load the function from the receiver.
   2328   ASSERT(callee->IsProperty());
   2329   __ Peek(x1, 0);
   2330   EmitKeyedPropertyLoad(callee->AsProperty());
   2331   PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
   2332 
   2333   // Push the target function under the receiver.
   2334   __ Pop(x10);
   2335   __ Push(x0, x10);
   2336 
   2337   EmitCall(expr, CallIC::METHOD);
   2338 }
   2339 
   2340 
   2341 void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
   2342   // Load the arguments.
   2343   ZoneList<Expression*>* args = expr->arguments();
   2344   int arg_count = args->length();
   2345   { PreservePositionScope scope(masm()->positions_recorder());
   2346     for (int i = 0; i < arg_count; i++) {
   2347       VisitForStackValue(args->at(i));
   2348     }
   2349   }
   2350   // Record source position of the IC call.
   2351   SetSourcePosition(expr->position());
   2352 
   2353   Handle<Code> ic = CallIC::initialize_stub(
   2354       isolate(), arg_count, call_type);
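           // x3 carries the feedback slot index and x1 the target function, which
           // sits below the receiver and the arg_count arguments on the stack,
           // i.e. (arg_count + 1) slots down.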
   2355   __ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot()));
   2356   __ Peek(x1, (arg_count + 1) * kXRegSize);
   2357   // Don't assign a type feedback id to the IC, since type feedback is provided
   2358   // by the vector above.
   2359   CallIC(ic);
   2360 
   2361   RecordJSReturnSite(expr);
   2362   // Restore context register.
   2363   __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2364   context()->DropAndPlug(1, x0);
   2365 }
   2366 
   2367 
   2368 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
   2369   ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
   2370   // Prepare to push a copy of the first argument or undefined if it doesn't
   2371   // exist.
   2372   if (arg_count > 0) {
   2373     __ Peek(x10, arg_count * kXRegSize);
   2374   } else {
   2375     __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
   2376   }
   2377 
   2378   // Prepare to push the receiver of the enclosing function.
   2379   int receiver_offset = 2 + info_->scope()->num_parameters();
   2380   __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize));
   2381 
   2382   // Push.
   2383   __ Push(x10, x11);
   2384 
   2385   // Prepare to push the language mode.
   2386   __ Mov(x10, Smi::FromInt(strict_mode()));
    2387   // Prepare to push the start position of the scope the call resides in.
   2388   __ Mov(x11, Smi::FromInt(scope()->start_position()));
   2389 
   2390   // Push.
   2391   __ Push(x10, x11);
   2392 
   2393   // Do the runtime call.
   2394   __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
   2395 }
   2396 
   2397 
   2398 void FullCodeGenerator::VisitCall(Call* expr) {
   2399 #ifdef DEBUG
   2400   // We want to verify that RecordJSReturnSite gets called on all paths
   2401   // through this function.  Avoid early returns.
   2402   expr->return_is_recorded_ = false;
   2403 #endif
   2404 
   2405   Comment cmnt(masm_, "[ Call");
   2406   Expression* callee = expr->expression();
   2407   Call::CallType call_type = expr->GetCallType(isolate());
   2408 
   2409   if (call_type == Call::POSSIBLY_EVAL_CALL) {
   2410     // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
   2411     // to resolve the function we need to call and the receiver of the
   2412     // call.  Then we call the resolved function using the given
   2413     // arguments.
   2414     ZoneList<Expression*>* args = expr->arguments();
   2415     int arg_count = args->length();
   2416 
   2417     {
   2418       PreservePositionScope pos_scope(masm()->positions_recorder());
   2419       VisitForStackValue(callee);
   2420       __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
   2421       __ Push(x10);  // Reserved receiver slot.
   2422 
   2423       // Push the arguments.
   2424       for (int i = 0; i < arg_count; i++) {
   2425         VisitForStackValue(args->at(i));
   2426       }
   2427 
   2428       // Push a copy of the function (found below the arguments) and
   2429       // resolve eval.
   2430       __ Peek(x10, (arg_count + 1) * kPointerSize);
   2431       __ Push(x10);
   2432       EmitResolvePossiblyDirectEval(arg_count);
   2433 
   2434       // The runtime call returns a pair of values in x0 (function) and
   2435       // x1 (receiver). Touch up the stack with the right values.
   2436       __ PokePair(x1, x0, arg_count * kPointerSize);
   2437     }
   2438 
   2439     // Record source position for debugger.
   2440     SetSourcePosition(expr->position());
   2441 
   2442     // Call the evaluated function.
   2443     CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
   2444     __ Peek(x1, (arg_count + 1) * kXRegSize);
   2445     __ CallStub(&stub);
   2446     RecordJSReturnSite(expr);
   2447     // Restore context register.
   2448     __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2449     context()->DropAndPlug(1, x0);
   2450 
   2451   } else if (call_type == Call::GLOBAL_CALL) {
   2452     EmitCallWithLoadIC(expr);
   2453 
   2454   } else if (call_type == Call::LOOKUP_SLOT_CALL) {
   2455     // Call to a lookup slot (dynamically introduced variable).
   2456     VariableProxy* proxy = callee->AsVariableProxy();
   2457     Label slow, done;
   2458 
   2459     { PreservePositionScope scope(masm()->positions_recorder());
   2460       // Generate code for loading from variables potentially shadowed
   2461       // by eval-introduced variables.
   2462       EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
   2463     }
   2464 
   2465     __ Bind(&slow);
   2466     // Call the runtime to find the function to call (returned in x0)
   2467     // and the object holding it (returned in x1).
   2468     __ Push(context_register());
   2469     __ Mov(x10, Operand(proxy->name()));
   2470     __ Push(x10);
   2471     __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
   2472     __ Push(x0, x1);  // Receiver, function.
   2473 
   2474     // If fast case code has been generated, emit code to push the
   2475     // function and receiver and have the slow path jump around this
   2476     // code.
   2477     if (done.is_linked()) {
   2478       Label call;
   2479       __ B(&call);
   2480       __ Bind(&done);
   2481       // Push function.
   2482       __ Push(x0);
   2483       // The receiver is implicitly the global receiver. Indicate this
    2484       // by passing undefined to the call function stub.
   2485       __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
   2486       __ Push(x1);
   2487       __ Bind(&call);
   2488     }
   2489 
   2490     // The receiver is either the global receiver or an object found
   2491     // by LoadContextSlot.
   2492     EmitCall(expr);
   2493   } else if (call_type == Call::PROPERTY_CALL) {
   2494     Property* property = callee->AsProperty();
   2495     { PreservePositionScope scope(masm()->positions_recorder());
   2496       VisitForStackValue(property->obj());
   2497     }
   2498     if (property->key()->IsPropertyName()) {
   2499       EmitCallWithLoadIC(expr);
   2500     } else {
   2501       EmitKeyedCallWithLoadIC(expr, property->key());
   2502     }
   2503 
   2504   } else {
   2505     ASSERT(call_type == Call::OTHER_CALL);
   2506     // Call to an arbitrary expression not handled specially above.
   2507     { PreservePositionScope scope(masm()->positions_recorder());
   2508       VisitForStackValue(callee);
   2509     }
   2510     __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
   2511     __ Push(x1);
   2512     // Emit function call.
   2513     EmitCall(expr);
   2514   }
   2515 
   2516 #ifdef DEBUG
   2517   // RecordJSReturnSite should have been called.
   2518   ASSERT(expr->return_is_recorded_);
   2519 #endif
   2520 }
   2521 
   2522 
   2523 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   2524   Comment cmnt(masm_, "[ CallNew");
   2525   // According to ECMA-262, section 11.2.2, page 44, the function
   2526   // expression in new calls must be evaluated before the
   2527   // arguments.
   2528 
   2529   // Push constructor on the stack.  If it's not a function it's used as
   2530   // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
   2531   // ignored.
   2532   VisitForStackValue(expr->expression());
   2533 
   2534   // Push the arguments ("left-to-right") on the stack.
   2535   ZoneList<Expression*>* args = expr->arguments();
   2536   int arg_count = args->length();
   2537   for (int i = 0; i < arg_count; i++) {
   2538     VisitForStackValue(args->at(i));
   2539   }
   2540 
   2541   // Call the construct call builtin that handles allocation and
   2542   // constructor invocation.
   2543   SetSourcePosition(expr->position());
   2544 
   2545   // Load function and argument count into x1 and x0.
   2546   __ Mov(x0, arg_count);
   2547   __ Peek(x1, arg_count * kXRegSize);
   2548 
   2549   // Record call targets in unoptimized code.
   2550   if (FLAG_pretenuring_call_new) {
   2551     EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
   2552     ASSERT(expr->AllocationSiteFeedbackSlot() ==
   2553            expr->CallNewFeedbackSlot() + 1);
   2554   }
   2555 
   2556   __ LoadObject(x2, FeedbackVector());
   2557   __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot()));
   2558 
   2559   CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
   2560   __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
   2561   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   2562   context()->Plug(x0);
   2563 }
   2564 
   2565 
   2566 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
   2567   ZoneList<Expression*>* args = expr->arguments();
   2568   ASSERT(args->length() == 1);
   2569 
   2570   VisitForAccumulatorValue(args->at(0));
   2571 
   2572   Label materialize_true, materialize_false;
   2573   Label* if_true = NULL;
   2574   Label* if_false = NULL;
   2575   Label* fall_through = NULL;
   2576   context()->PrepareTest(&materialize_true, &materialize_false,
   2577                          &if_true, &if_false, &fall_through);
   2578 
   2579   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2580   __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
   2581 
   2582   context()->Plug(if_true, if_false);
   2583 }
   2584 
   2585 
   2586 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
   2587   ZoneList<Expression*>* args = expr->arguments();
   2588   ASSERT(args->length() == 1);
   2589 
   2590   VisitForAccumulatorValue(args->at(0));
   2591 
   2592   Label materialize_true, materialize_false;
   2593   Label* if_true = NULL;
   2594   Label* if_false = NULL;
   2595   Label* fall_through = NULL;
   2596   context()->PrepareTest(&materialize_true, &materialize_false,
   2597                          &if_true, &if_false, &fall_through);
   2598 
   2599   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
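           // The mask combines the smi tag bit with the sign bit of the 32-bit smi
           // payload (bit 63 on arm64); the input is a non-negative smi only when
           // both are clear.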
   2600   __ TestAndSplit(x0, kSmiTagMask | (0x80000000UL << kSmiShift), if_true,
   2601                   if_false, fall_through);
   2602 
   2603   context()->Plug(if_true, if_false);
   2604 }
   2605 
   2606 
   2607 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
   2608   ZoneList<Expression*>* args = expr->arguments();
   2609   ASSERT(args->length() == 1);
   2610 
   2611   VisitForAccumulatorValue(args->at(0));
   2612 
   2613   Label materialize_true, materialize_false;
   2614   Label* if_true = NULL;
   2615   Label* if_false = NULL;
   2616   Label* fall_through = NULL;
   2617   context()->PrepareTest(&materialize_true, &materialize_false,
   2618                          &if_true, &if_false, &fall_through);
   2619 
   2620   __ JumpIfSmi(x0, if_false);
   2621   __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
   2622   __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
   2623   // Undetectable objects behave like undefined when tested with typeof.
   2624   __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
   2625   __ Tbnz(x11, Map::kIsUndetectable, if_false);
   2626   __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
   2627   __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
   2628   __ B(lt, if_false);
   2629   __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
   2630   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2631   Split(le, if_true, if_false, fall_through);
   2632 
   2633   context()->Plug(if_true, if_false);
   2634 }
   2635 
   2636 
   2637 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
   2638   ZoneList<Expression*>* args = expr->arguments();
   2639   ASSERT(args->length() == 1);
   2640 
   2641   VisitForAccumulatorValue(args->at(0));
   2642 
   2643   Label materialize_true, materialize_false;
   2644   Label* if_true = NULL;
   2645   Label* if_false = NULL;
   2646   Label* fall_through = NULL;
   2647   context()->PrepareTest(&materialize_true, &materialize_false,
   2648                          &if_true, &if_false, &fall_through);
   2649 
   2650   __ JumpIfSmi(x0, if_false);
   2651   __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
   2652   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2653   Split(ge, if_true, if_false, fall_through);
   2654 
   2655   context()->Plug(if_true, if_false);
   2656 }
   2657 
   2658 
   2659 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
   2660   ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
   2661   ZoneList<Expression*>* args = expr->arguments();
   2662   ASSERT(args->length() == 1);
   2663 
   2664   VisitForAccumulatorValue(args->at(0));
   2665 
   2666   Label materialize_true, materialize_false;
   2667   Label* if_true = NULL;
   2668   Label* if_false = NULL;
   2669   Label* fall_through = NULL;
   2670   context()->PrepareTest(&materialize_true, &materialize_false,
   2671                          &if_true, &if_false, &fall_through);
   2672 
   2673   __ JumpIfSmi(x0, if_false);
   2674   __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
   2675   __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
   2676   __ Tst(x11, 1 << Map::kIsUndetectable);
   2677   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2678   Split(ne, if_true, if_false, fall_through);
   2679 
   2680   context()->Plug(if_true, if_false);
   2681 }
   2682 
   2683 
   2684 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
   2685     CallRuntime* expr) {
   2686   ZoneList<Expression*>* args = expr->arguments();
   2687   ASSERT(args->length() == 1);
   2688   VisitForAccumulatorValue(args->at(0));
   2689 
   2690   Label materialize_true, materialize_false, skip_lookup;
   2691   Label* if_true = NULL;
   2692   Label* if_false = NULL;
   2693   Label* fall_through = NULL;
   2694   context()->PrepareTest(&materialize_true, &materialize_false,
   2695                          &if_true, &if_false, &fall_through);
   2696 
   2697   Register object = x0;
   2698   __ AssertNotSmi(object);
   2699 
   2700   Register map = x10;
   2701   Register bitfield2 = x11;
   2702   __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
   2703   __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
   2704   __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);
   2705 
   2706   // Check for fast case object. Generate false result for slow case object.
   2707   Register props = x12;
   2708   Register props_map = x12;
   2709   Register hash_table_map = x13;
   2710   __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
   2711   __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
   2712   __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
   2713   __ Cmp(props_map, hash_table_map);
   2714   __ B(eq, if_false);
   2715 
   2716   // Look for valueOf name in the descriptor array, and indicate false if found.
    2717   // Since we omit an enumeration index check, if valueOf is added via a
    2718   // transition that shares its descriptor array, this is a false positive.
   2719   Label loop, done;
   2720 
   2721   // Skip loop if no descriptors are valid.
   2722   Register descriptors = x12;
   2723   Register descriptors_length = x13;
   2724   __ NumberOfOwnDescriptors(descriptors_length, map);
   2725   __ Cbz(descriptors_length, &done);
   2726 
   2727   __ LoadInstanceDescriptors(map, descriptors);
   2728 
   2729   // Calculate the end of the descriptor array.
   2730   Register descriptors_end = x14;
   2731   __ Mov(x15, DescriptorArray::kDescriptorSize);
   2732   __ Mul(descriptors_length, descriptors_length, x15);
   2733   // Calculate location of the first key name.
   2734   __ Add(descriptors, descriptors,
   2735          DescriptorArray::kFirstOffset - kHeapObjectTag);
   2736   // Calculate the end of the descriptor array.
   2737   __ Add(descriptors_end, descriptors,
   2738          Operand(descriptors_length, LSL, kPointerSizeLog2));
   2739 
   2740   // Loop through all the keys in the descriptor array. If one of these is the
   2741   // string "valueOf" the result is false.
   2742   Register valueof_string = x1;
   2743   int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize;
   2744   __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
   2745   __ Bind(&loop);
   2746   __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex));
   2747   __ Cmp(x15, valueof_string);
   2748   __ B(eq, if_false);
   2749   __ Cmp(descriptors, descriptors_end);
   2750   __ B(ne, &loop);
   2751 
   2752   __ Bind(&done);
   2753 
   2754   // Set the bit in the map to indicate that there is no local valueOf field.
   2755   __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
   2756   __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
   2757   __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));
   2758 
   2759   __ Bind(&skip_lookup);
   2760 
    2761   // If a valueOf property is not found on the object, check that its prototype
    2762   // is the unmodified String prototype. If not, the result is false.
   2763   Register prototype = x1;
   2764   Register global_idx = x2;
   2765   Register native_context = x2;
   2766   Register string_proto = x3;
   2767   Register proto_map = x4;
   2768   __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
   2769   __ JumpIfSmi(prototype, if_false);
   2770   __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
   2771   __ Ldr(global_idx, GlobalObjectMemOperand());
   2772   __ Ldr(native_context,
   2773          FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
   2774   __ Ldr(string_proto,
   2775          ContextMemOperand(native_context,
   2776                            Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
   2777   __ Cmp(proto_map, string_proto);
   2778 
   2779   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2780   Split(eq, if_true, if_false, fall_through);
   2781 
   2782   context()->Plug(if_true, if_false);
   2783 }
   2784 
   2785 
   2786 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
   2787   ZoneList<Expression*>* args = expr->arguments();
   2788   ASSERT(args->length() == 1);
   2789 
   2790   VisitForAccumulatorValue(args->at(0));
   2791 
   2792   Label materialize_true, materialize_false;
   2793   Label* if_true = NULL;
   2794   Label* if_false = NULL;
   2795   Label* fall_through = NULL;
   2796   context()->PrepareTest(&materialize_true, &materialize_false,
   2797                          &if_true, &if_false, &fall_through);
   2798 
   2799   __ JumpIfSmi(x0, if_false);
   2800   __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
   2801   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2802   Split(eq, if_true, if_false, fall_through);
   2803 
   2804   context()->Plug(if_true, if_false);
   2805 }
   2806 
   2807 
   2808 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
   2809   ZoneList<Expression*>* args = expr->arguments();
   2810   ASSERT(args->length() == 1);
   2811 
   2812   VisitForAccumulatorValue(args->at(0));
   2813 
   2814   Label materialize_true, materialize_false;
   2815   Label* if_true = NULL;
   2816   Label* if_false = NULL;
   2817   Label* fall_through = NULL;
   2818   context()->PrepareTest(&materialize_true, &materialize_false,
   2819                          &if_true, &if_false, &fall_through);
   2820 
   2821   // Only a HeapNumber can be -0.0, so return false if we have something else.
   2822   __ CheckMap(x0, x1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
   2823 
   2824   // Test the bit pattern.
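           // Only INT64_MIN (0x8000000000000000, the bit pattern of -0.0) overflows
           // when 1 is subtracted from it, so the V flag singles out -0.0.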
   2825   __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
   2826   __ Cmp(x10, 1);   // Set V on 0x8000000000000000.
   2827 
   2828   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2829   Split(vs, if_true, if_false, fall_through);
   2830 
   2831   context()->Plug(if_true, if_false);
   2832 }
   2833 
   2834 
   2835 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
   2836   ZoneList<Expression*>* args = expr->arguments();
   2837   ASSERT(args->length() == 1);
   2838 
   2839   VisitForAccumulatorValue(args->at(0));
   2840 
   2841   Label materialize_true, materialize_false;
   2842   Label* if_true = NULL;
   2843   Label* if_false = NULL;
   2844   Label* fall_through = NULL;
   2845   context()->PrepareTest(&materialize_true, &materialize_false,
   2846                          &if_true, &if_false, &fall_through);
   2847 
   2848   __ JumpIfSmi(x0, if_false);
   2849   __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
   2850   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2851   Split(eq, if_true, if_false, fall_through);
   2852 
   2853   context()->Plug(if_true, if_false);
   2854 }
   2855 
   2856 
   2857 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
   2858   ZoneList<Expression*>* args = expr->arguments();
   2859   ASSERT(args->length() == 1);
   2860 
   2861   VisitForAccumulatorValue(args->at(0));
   2862 
   2863   Label materialize_true, materialize_false;
   2864   Label* if_true = NULL;
   2865   Label* if_false = NULL;
   2866   Label* fall_through = NULL;
   2867   context()->PrepareTest(&materialize_true, &materialize_false,
   2868                          &if_true, &if_false, &fall_through);
   2869 
   2870   __ JumpIfSmi(x0, if_false);
   2871   __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
   2872   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2873   Split(eq, if_true, if_false, fall_through);
   2874 
   2875   context()->Plug(if_true, if_false);
   2876 }
    2877 
    2878 
   2880 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
   2881   ASSERT(expr->arguments()->length() == 0);
   2882 
   2883   Label materialize_true, materialize_false;
   2884   Label* if_true = NULL;
   2885   Label* if_false = NULL;
   2886   Label* fall_through = NULL;
   2887   context()->PrepareTest(&materialize_true, &materialize_false,
   2888                          &if_true, &if_false, &fall_through);
   2889 
   2890   // Get the frame pointer for the calling frame.
   2891   __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2892 
   2893   // Skip the arguments adaptor frame if it exists.
   2894   Label check_frame_marker;
   2895   __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
   2896   __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   2897   __ B(ne, &check_frame_marker);
   2898   __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
   2899 
   2900   // Check the marker in the calling frame.
   2901   __ Bind(&check_frame_marker);
   2902   __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
   2903   __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
   2904   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2905   Split(eq, if_true, if_false, fall_through);
   2906 
   2907   context()->Plug(if_true, if_false);
   2908 }
   2909 
   2910 
   2911 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
   2912   ZoneList<Expression*>* args = expr->arguments();
   2913   ASSERT(args->length() == 2);
   2914 
   2915   // Load the two objects into registers and perform the comparison.
   2916   VisitForStackValue(args->at(0));
   2917   VisitForAccumulatorValue(args->at(1));
   2918 
   2919   Label materialize_true, materialize_false;
   2920   Label* if_true = NULL;
   2921   Label* if_false = NULL;
   2922   Label* fall_through = NULL;
   2923   context()->PrepareTest(&materialize_true, &materialize_false,
   2924                          &if_true, &if_false, &fall_through);
   2925 
   2926   __ Pop(x1);
   2927   __ Cmp(x0, x1);
   2928   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2929   Split(eq, if_true, if_false, fall_through);
   2930 
   2931   context()->Plug(if_true, if_false);
   2932 }
   2933 
   2934 
   2935 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
   2936   ZoneList<Expression*>* args = expr->arguments();
   2937   ASSERT(args->length() == 1);
   2938 
   2939   // ArgumentsAccessStub expects the key in x1.
   2940   VisitForAccumulatorValue(args->at(0));
   2941   __ Mov(x1, x0);
   2942   __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
   2943   ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
   2944   __ CallStub(&stub);
   2945   context()->Plug(x0);
   2946 }
   2947 
   2948 
   2949 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
   2950   ASSERT(expr->arguments()->length() == 0);
   2951   Label exit;
   2952   // Get the number of formal parameters.
   2953   __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
   2954 
   2955   // Check if the calling frame is an arguments adaptor frame.
   2956   __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2957   __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
   2958   __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   2959   __ B(ne, &exit);
   2960 
   2961   // Arguments adaptor case: Read the arguments length from the
   2962   // adaptor frame.
   2963   __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));
   2964 
   2965   __ Bind(&exit);
   2966   context()->Plug(x0);
   2967 }
   2968 
   2969 
   2970 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
   2971   ASM_LOCATION("FullCodeGenerator::EmitClassOf");
   2972   ZoneList<Expression*>* args = expr->arguments();
   2973   ASSERT(args->length() == 1);
   2974   Label done, null, function, non_function_constructor;
   2975 
   2976   VisitForAccumulatorValue(args->at(0));
   2977 
   2978   // If the object is a smi, we return null.
   2979   __ JumpIfSmi(x0, &null);
   2980 
   2981   // Check that the object is a JS object but take special care of JS
   2982   // functions to make sure they have 'Function' as their class.
   2983   // Assume that there are only two callable types, and one of them is at
   2984   // either end of the type range for JS object types. Saves extra comparisons.
   2985   STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
   2986   __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
   2987   // x10: object's map.
   2988   // x11: object's type.
   2989   __ B(lt, &null);
   2990   STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
   2991                 FIRST_SPEC_OBJECT_TYPE + 1);
   2992   __ B(eq, &function);
   2993 
   2994   __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
   2995   STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
   2996                 LAST_SPEC_OBJECT_TYPE - 1);
   2997   __ B(eq, &function);
   2998   // Assume that there is no larger type.
   2999   STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
   3000 
   3001   // Check if the constructor in the map is a JS function.
   3002   __ Ldr(x12, FieldMemOperand(x10, Map::kConstructorOffset));
   3003   __ JumpIfNotObjectType(x12, x13, x14, JS_FUNCTION_TYPE,
   3004                          &non_function_constructor);
   3005 
   3006   // x12 now contains the constructor function. Grab the
   3007   // instance class name from there.
   3008   __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
   3009   __ Ldr(x0,
   3010          FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
   3011   __ B(&done);
   3012 
   3013   // Functions have class 'Function'.
   3014   __ Bind(&function);
   3015   __ LoadRoot(x0, Heap::kfunction_class_stringRootIndex);
   3016   __ B(&done);
   3017 
   3018   // Objects with a non-function constructor have class 'Object'.
   3019   __ Bind(&non_function_constructor);
   3020   __ LoadRoot(x0, Heap::kObject_stringRootIndex);
   3021   __ B(&done);
   3022 
   3023   // Non-JS objects have class null.
   3024   __ Bind(&null);
   3025   __ LoadRoot(x0, Heap::kNullValueRootIndex);
   3026 
   3027   // All done.
   3028   __ Bind(&done);
   3029 
   3030   context()->Plug(x0);
   3031 }
   3032 
   3033 
   3034 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
   3035   // Load the arguments on the stack and call the stub.
   3036   SubStringStub stub(isolate());
   3037   ZoneList<Expression*>* args = expr->arguments();
   3038   ASSERT(args->length() == 3);
   3039   VisitForStackValue(args->at(0));
   3040   VisitForStackValue(args->at(1));
   3041   VisitForStackValue(args->at(2));
   3042   __ CallStub(&stub);
   3043   context()->Plug(x0);
   3044 }
   3045 
   3046 
   3047 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
   3048   // Load the arguments on the stack and call the stub.
   3049   RegExpExecStub stub(isolate());
   3050   ZoneList<Expression*>* args = expr->arguments();
   3051   ASSERT(args->length() == 4);
   3052   VisitForStackValue(args->at(0));
   3053   VisitForStackValue(args->at(1));
   3054   VisitForStackValue(args->at(2));
   3055   VisitForStackValue(args->at(3));
   3056   __ CallStub(&stub);
   3057   context()->Plug(x0);
   3058 }
   3059 
   3060 
   3061 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
   3062   ASM_LOCATION("FullCodeGenerator::EmitValueOf");
   3063   ZoneList<Expression*>* args = expr->arguments();
   3064   ASSERT(args->length() == 1);
   3065   VisitForAccumulatorValue(args->at(0));  // Load the object.
   3066 
   3067   Label done;
   3068   // If the object is a smi return the object.
   3069   __ JumpIfSmi(x0, &done);
   3070   // If the object is not a value type, return the object.
   3071   __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
   3072   __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
   3073 
   3074   __ Bind(&done);
   3075   context()->Plug(x0);
   3076 }
   3077 
   3078 
   3079 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
   3080   ZoneList<Expression*>* args = expr->arguments();
   3081   ASSERT(args->length() == 2);
   3082   ASSERT_NE(NULL, args->at(1)->AsLiteral());
   3083   Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
   3084 
   3085   VisitForAccumulatorValue(args->at(0));  // Load the object.
   3086 
   3087   Label runtime, done, not_date_object;
   3088   Register object = x0;
   3089   Register result = x0;
   3090   Register stamp_addr = x10;
   3091   Register stamp_cache = x11;
   3092 
   3093   __ JumpIfSmi(object, &not_date_object);
   3094   __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, &not_date_object);
   3095 
   3096   if (index->value() == 0) {
   3097     __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
   3098     __ B(&done);
   3099   } else {
   3100     if (index->value() < JSDate::kFirstUncachedField) {
   3101       ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
   3102       __ Mov(x10, stamp);
   3103       __ Ldr(stamp_addr, MemOperand(x10));
   3104       __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
   3105       __ Cmp(stamp_addr, stamp_cache);
   3106       __ B(ne, &runtime);
   3107       __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
   3108                                              kPointerSize * index->value()));
   3109       __ B(&done);
   3110     }
   3111 
   3112     __ Bind(&runtime);
   3113     __ Mov(x1, index);
   3114     __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
   3115     __ B(&done);
   3116   }
   3117 
   3118   __ Bind(&not_date_object);
   3119   __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
   3120   __ Bind(&done);
   3121   context()->Plug(x0);
   3122 }
   3123 
   3124 
   3125 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
   3126   ZoneList<Expression*>* args = expr->arguments();
   3127   ASSERT_EQ(3, args->length());
   3128 
   3129   Register string = x0;
   3130   Register index = x1;
   3131   Register value = x2;
   3132   Register scratch = x10;
   3133 
   3134   VisitForStackValue(args->at(1));  // index
   3135   VisitForStackValue(args->at(2));  // value
   3136   VisitForAccumulatorValue(args->at(0));  // string
   3137   __ Pop(value, index);
   3138 
   3139   if (FLAG_debug_code) {
   3140     __ AssertSmi(value, kNonSmiValue);
   3141     __ AssertSmi(index, kNonSmiIndex);
   3142     static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
   3143     __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
   3144                                  one_byte_seq_type);
   3145   }
   3146 
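           // Compute the address of the string's character payload, then store the
           // untagged value as a single byte at the untagged index.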
   3147   __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
   3148   __ SmiUntag(value);
   3149   __ SmiUntag(index);
   3150   __ Strb(value, MemOperand(scratch, index));
   3151   context()->Plug(string);
   3152 }
   3153 
   3154 
   3155 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
   3156   ZoneList<Expression*>* args = expr->arguments();
   3157   ASSERT_EQ(3, args->length());
   3158 
   3159   Register string = x0;
   3160   Register index = x1;
   3161   Register value = x2;
   3162   Register scratch = x10;
   3163 
   3164   VisitForStackValue(args->at(1));  // index
   3165   VisitForStackValue(args->at(2));  // value
   3166   VisitForAccumulatorValue(args->at(0));  // string
   3167   __ Pop(value, index);
   3168 
   3169   if (FLAG_debug_code) {
   3170     __ AssertSmi(value, kNonSmiValue);
   3171     __ AssertSmi(index, kNonSmiIndex);
   3172     static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
   3173     __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
   3174                                  two_byte_seq_type);
   3175   }
   3176 
   3177   __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
   3178   __ SmiUntag(value);
   3179   __ SmiUntag(index);
   3180   __ Strh(value, MemOperand(scratch, index, LSL, 1));
   3181   context()->Plug(string);
   3182 }
   3183 
   3184 
   3185 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
   3186   // Load the arguments on the stack and call the MathPow stub.
   3187   ZoneList<Expression*>* args = expr->arguments();
   3188   ASSERT(args->length() == 2);
   3189   VisitForStackValue(args->at(0));
   3190   VisitForStackValue(args->at(1));
   3191   MathPowStub stub(isolate(), MathPowStub::ON_STACK);
   3192   __ CallStub(&stub);
   3193   context()->Plug(x0);
   3194 }
   3195 
   3196 
   3197 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
   3198   ZoneList<Expression*>* args = expr->arguments();
   3199   ASSERT(args->length() == 2);
   3200   VisitForStackValue(args->at(0));  // Load the object.
   3201   VisitForAccumulatorValue(args->at(1));  // Load the value.
   3202   __ Pop(x1);
   3203   // x0 = value.
   3204   // x1 = object.
   3205 
   3206   Label done;
   3207   // If the object is a smi, return the value.
   3208   __ JumpIfSmi(x1, &done);
   3209 
   3210   // If the object is not a value type, return the value.
   3211   __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
   3212 
   3213   // Store the value.
   3214   __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
   3215   // Update the write barrier. Save the value as it will be
   3216   // overwritten by the write barrier code and is needed afterward.
   3217   __ Mov(x10, x0);
   3218   __ RecordWriteField(
   3219       x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
   3220 
   3221   __ Bind(&done);
   3222   context()->Plug(x0);
   3223 }
   3224 
   3225 
   3226 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
   3227   ZoneList<Expression*>* args = expr->arguments();
   3228   ASSERT_EQ(args->length(), 1);
   3229 
   3230   // Load the argument into x0 and call the stub.
   3231   VisitForAccumulatorValue(args->at(0));
   3232 
   3233   NumberToStringStub stub(isolate());
   3234   __ CallStub(&stub);
   3235   context()->Plug(x0);
   3236 }
   3237 
   3238 
   3239 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
   3240   ZoneList<Expression*>* args = expr->arguments();
   3241   ASSERT(args->length() == 1);
   3242 
   3243   VisitForAccumulatorValue(args->at(0));
   3244 
   3245   Label done;
   3246   Register code = x0;
   3247   Register result = x1;
   3248 
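           // Emit the inline fast path for the conversion; the deferred slow path
           // emitted below falls back to the runtime through the call helper.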
   3249   StringCharFromCodeGenerator generator(code, result);
   3250   generator.GenerateFast(masm_);
   3251   __ B(&done);
   3252 
   3253   NopRuntimeCallHelper call_helper;
   3254   generator.GenerateSlow(masm_, call_helper);
   3255 
   3256   __ Bind(&done);
   3257   context()->Plug(result);
   3258 }
   3259 
   3260 
   3261 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   3262   ZoneList<Expression*>* args = expr->arguments();
   3263   ASSERT(args->length() == 2);
   3264 
   3265   VisitForStackValue(args->at(0));
   3266   VisitForAccumulatorValue(args->at(1));
   3267 
   3268   Register object = x1;
   3269   Register index = x0;
   3270   Register result = x3;
   3271 
   3272   __ Pop(object);
   3273 
   3274   Label need_conversion;
   3275   Label index_out_of_range;
   3276   Label done;
   3277   StringCharCodeAtGenerator generator(object,
   3278                                       index,
   3279                                       result,
   3280                                       &need_conversion,
   3281                                       &need_conversion,
   3282                                       &index_out_of_range,
   3283                                       STRING_INDEX_IS_NUMBER);
   3284   generator.GenerateFast(masm_);
   3285   __ B(&done);
   3286 
   3287   __ Bind(&index_out_of_range);
   3288   // When the index is out of range, the spec requires us to return NaN.
   3289   __ LoadRoot(result, Heap::kNanValueRootIndex);
   3290   __ B(&done);
   3291 
   3292   __ Bind(&need_conversion);
   3293   // Load the undefined value into the result register, which will
   3294   // trigger conversion.
   3295   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
   3296   __ B(&done);
   3297 
   3298   NopRuntimeCallHelper call_helper;
   3299   generator.GenerateSlow(masm_, call_helper);
   3300 
   3301   __ Bind(&done);
   3302   context()->Plug(result);
   3303 }
   3304 
   3305 
   3306 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
   3307   ZoneList<Expression*>* args = expr->arguments();
   3308   ASSERT(args->length() == 2);
   3309 
   3310   VisitForStackValue(args->at(0));
   3311   VisitForAccumulatorValue(args->at(1));
   3312 
   3313   Register object = x1;
   3314   Register index = x0;
   3315   Register result = x0;
   3316 
   3317   __ Pop(object);
   3318 
   3319   Label need_conversion;
   3320   Label index_out_of_range;
   3321   Label done;
   3322   StringCharAtGenerator generator(object,
   3323                                   index,
   3324                                   x3,
   3325                                   result,
   3326                                   &need_conversion,
   3327                                   &need_conversion,
   3328                                   &index_out_of_range,
   3329                                   STRING_INDEX_IS_NUMBER);
   3330   generator.GenerateFast(masm_);
   3331   __ B(&done);
   3332 
   3333   __ Bind(&index_out_of_range);
   3334   // When the index is out of range, the spec requires us to return
   3335   // the empty string.
   3336   __ LoadRoot(result, Heap::kempty_stringRootIndex);
   3337   __ B(&done);
   3338 
   3339   __ Bind(&need_conversion);
   3340   // Move smi zero into the result register, which will trigger conversion.
   3341   __ Mov(result, Smi::FromInt(0));
   3342   __ B(&done);
   3343 
   3344   NopRuntimeCallHelper call_helper;
   3345   generator.GenerateSlow(masm_, call_helper);
   3346 
   3347   __ Bind(&done);
   3348   context()->Plug(result);
   3349 }
   3350 
   3351 
   3352 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
   3353   ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
   3354   ZoneList<Expression*>* args = expr->arguments();
   3355   ASSERT_EQ(2, args->length());
   3356 
   3357   VisitForStackValue(args->at(0));
   3358   VisitForAccumulatorValue(args->at(1));
   3359 
   3360   __ Pop(x1);
   3361   StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
   3362   __ CallStub(&stub);
   3363 
   3364   context()->Plug(x0);
   3365 }
   3366 
   3367 
   3368 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
   3369   ZoneList<Expression*>* args = expr->arguments();
   3370   ASSERT_EQ(2, args->length());
   3371   VisitForStackValue(args->at(0));
   3372   VisitForStackValue(args->at(1));
   3373 
   3374   StringCompareStub stub(isolate());
   3375   __ CallStub(&stub);
   3376   context()->Plug(x0);
   3377 }
   3378 
   3379 
   3380 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
   3381   ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
   3382   ZoneList<Expression*>* args = expr->arguments();
   3383   ASSERT(args->length() >= 2);
   3384 
   3385   int arg_count = args->length() - 2;  // 2 ~ receiver and function.
   3386   for (int i = 0; i < arg_count + 1; i++) {
   3387     VisitForStackValue(args->at(i));
   3388   }
   3389   VisitForAccumulatorValue(args->last());  // Function.
   3390 
   3391   Label runtime, done;
   3392   // Check for non-function argument (including proxy).
   3393   __ JumpIfSmi(x0, &runtime);
   3394   __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);
   3395 
   3396   // InvokeFunction requires the function in x1. Move it in there.
   3397   __ Mov(x1, x0);
   3398   ParameterCount count(arg_count);
   3399   __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
   3400   __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   3401   __ B(&done);
   3402 
   3403   __ Bind(&runtime);
   3404   __ Push(x0);
   3405   __ CallRuntime(Runtime::kCall, args->length());
   3406   __ Bind(&done);
   3407 
   3408   context()->Plug(x0);
   3409 }
   3410 
   3411 
   3412 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
   3413   RegExpConstructResultStub stub(isolate());
   3414   ZoneList<Expression*>* args = expr->arguments();
   3415   ASSERT(args->length() == 3);
   3416   VisitForStackValue(args->at(0));
   3417   VisitForStackValue(args->at(1));
   3418   VisitForAccumulatorValue(args->at(2));
   3419   __ Pop(x1, x2);
   3420   __ CallStub(&stub);
   3421   context()->Plug(x0);
   3422 }
   3423 
   3424 
   3425 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
   3426   ZoneList<Expression*>* args = expr->arguments();
   3427   ASSERT_EQ(2, args->length());
   3428   ASSERT_NE(NULL, args->at(0)->AsLiteral());
   3429   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
   3430 
   3431   Handle<FixedArray> jsfunction_result_caches(
   3432       isolate()->native_context()->jsfunction_result_caches());
   3433   if (jsfunction_result_caches->length() <= cache_id) {
   3434     __ Abort(kAttemptToUseUndefinedCache);
   3435     __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
   3436     context()->Plug(x0);
   3437     return;
   3438   }
   3439 
   3440   VisitForAccumulatorValue(args->at(1));
   3441 
   3442   Register key = x0;
   3443   Register cache = x1;
   3444   __ Ldr(cache, GlobalObjectMemOperand());
   3445   __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
   3446   __ Ldr(cache, ContextMemOperand(cache,
   3447                                   Context::JSFUNCTION_RESULT_CACHES_INDEX));
   3448   __ Ldr(cache,
   3449          FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
   3450 
   3451   Label done;
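           // The finger is an index into the cache array; compute the address of the
           // entry it indicates. Each entry is a (key, value) pair in adjacent slots.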
   3452   __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
   3453                                        JSFunctionResultCache::kFingerOffset));
   3454   __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
   3455   __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));
   3456 
   3457   // Load the key and data from the cache.
   3458   __ Ldp(x2, x3, MemOperand(x3));
   3459 
   3460   __ Cmp(key, x2);
   3461   __ CmovX(x0, x3, eq);
   3462   __ B(eq, &done);
   3463 
   3464   // Call runtime to perform the lookup.
   3465   __ Push(cache, key);
   3466   __ CallRuntime(Runtime::kHiddenGetFromCache, 2);
   3467 
   3468   __ Bind(&done);
   3469   context()->Plug(x0);
   3470 }
   3471 
   3472 
   3473 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
   3474   ZoneList<Expression*>* args = expr->arguments();
   3475   VisitForAccumulatorValue(args->at(0));
   3476 
   3477   Label materialize_true, materialize_false;
   3478   Label* if_true = NULL;
   3479   Label* if_false = NULL;
   3480   Label* fall_through = NULL;
   3481   context()->PrepareTest(&materialize_true, &materialize_false,
   3482                          &if_true, &if_false, &fall_through);
   3483 
   3484   __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
   3485   __ Tst(x10, String::kContainsCachedArrayIndexMask);
   3486   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3487   Split(eq, if_true, if_false, fall_through);
   3488 
   3489   context()->Plug(if_true, if_false);
   3490 }
   3491 
   3492 
   3493 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
   3494   ZoneList<Expression*>* args = expr->arguments();
   3495   ASSERT(args->length() == 1);
   3496   VisitForAccumulatorValue(args->at(0));
   3497 
   3498   __ AssertString(x0);
   3499 
   3500   __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
   3501   __ IndexFromHash(x10, x0);
   3502 
   3503   context()->Plug(x0);
   3504 }
   3505 
   3506 
   3507 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
   3508   ASM_LOCATION("FullCodeGenerator::EmitFastAsciiArrayJoin");
   3509 
   3510   ZoneList<Expression*>* args = expr->arguments();
   3511   ASSERT(args->length() == 2);
   3512   VisitForStackValue(args->at(1));
   3513   VisitForAccumulatorValue(args->at(0));
   3514 
   3515   Register array = x0;
   3516   Register result = x0;
   3517   Register elements = x1;
   3518   Register element = x2;
   3519   Register separator = x3;
   3520   Register array_length = x4;
   3521   Register result_pos = x5;
   3522   Register map = x6;
   3523   Register string_length = x10;
   3524   Register elements_end = x11;
   3525   Register string = x12;
   3526   Register scratch1 = x13;
   3527   Register scratch2 = x14;
   3528   Register scratch3 = x7;
   3529   Register separator_length = x15;
   3530 
   3531   Label bailout, done, one_char_separator, long_separator,
   3532       non_trivial_array, not_size_one_array, loop,
   3533       empty_separator_loop, one_char_separator_loop,
   3534       one_char_separator_loop_entry, long_separator_loop;
   3535 
   3536   // The separator operand is on the stack.
   3537   __ Pop(separator);
   3538 
   3539   // Check that the array is a JSArray.
   3540   __ JumpIfSmi(array, &bailout);
   3541   __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
   3542 
   3543   // Check that the array has fast elements.
   3544   __ CheckFastElements(map, scratch1, &bailout);
   3545 
   3546   // If the array has length zero, return the empty string.
   3547   // Load and untag the length of the array.
   3548   // It is an unsigned value, so we can skip sign extension.
   3549   // We assume little endianness.
   3550   __ Ldrsw(array_length,
   3551            UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
   3552   __ Cbnz(array_length, &non_trivial_array);
   3553   __ LoadRoot(result, Heap::kempty_stringRootIndex);
   3554   __ B(&done);
   3555 
   3556   __ Bind(&non_trivial_array);
   3557   // Get the FixedArray containing array's elements.
   3558   __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
   3559 
   3560   // Check that all array elements are sequential ASCII strings, and
   3561   // accumulate the sum of their lengths.
   3562   __ Mov(string_length, 0);
   3563   __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
   3564   __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
   3565   // Loop condition: while (element < elements_end).
   3566   // Live values in registers:
   3567   //   elements: Fixed array of strings.
   3568   //   array_length: Length of the fixed array of strings (not smi)
   3569   //   separator: Separator string
   3570   //   string_length: Accumulated sum of string lengths (not smi).
   3571   //   element: Current array element.
   3572   //   elements_end: Array end.
   3573   if (FLAG_debug_code) {
   3574     __ Cmp(array_length, 0);
   3575     __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
   3576   }
   3577   __ Bind(&loop);
   3578   __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
   3579   __ JumpIfSmi(string, &bailout);
   3580   __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
   3581   __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
   3582   __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
   3583   __ Ldrsw(scratch1,
   3584            UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
   3585   __ Adds(string_length, string_length, scratch1);
   3586   __ B(vs, &bailout);
   3587   __ Cmp(element, elements_end);
   3588   __ B(lt, &loop);
   3589 
   3590   // If array_length is 1, return elements[0], a string.
   3591   __ Cmp(array_length, 1);
   3592   __ B(ne, &not_size_one_array);
   3593   __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
   3594   __ B(&done);
   3595 
   3596   __ Bind(&not_size_one_array);
   3597 
   3598   // Live values in registers:
   3599   //   separator: Separator string
   3600   //   array_length: Length of the array (not smi).
   3601   //   string_length: Sum of string lengths (not smi).
   3602   //   elements: FixedArray of strings.
   3603 
   3604   // Check that the separator is a flat ASCII string.
   3605   __ JumpIfSmi(separator, &bailout);
   3606   __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
   3607   __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
   3608   __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
   3609 
   3610   // Add (separator length times array_length) - separator length to the
   3611   // string_length to get the length of the result string.
   3612   // Load the separator length as untagged.
   3613   // We assume little endianness, and that the length is positive.
   3614   __ Ldrsw(separator_length,
   3615            UntagSmiFieldMemOperand(separator,
   3616                                    SeqOneByteString::kLengthOffset));
   3617   __ Sub(string_length, string_length, separator_length);
   3618   __ Umaddl(string_length, array_length.W(), separator_length.W(),
   3619             string_length);
   3620 
   3621   // Get first element in the array.
   3622   __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
   3623   // Live values in registers:
   3624   //   element: First array element
   3625   //   separator: Separator string
   3626   //   string_length: Length of result string (not smi)
   3627   //   array_length: Length of the array (not smi).
   3628   __ AllocateAsciiString(result, string_length, scratch1, scratch2, scratch3,
   3629                          &bailout);
   3630 
   3631   // Prepare for looping. Set up elements_end to end of the array. Set
   3632   // result_pos to the position of the result where to write the first
   3633   // character.
   3634   // TODO(all): useless unless AllocateAsciiString trashes the register.
   3635   __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
   3636   __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
   3637 
   3638   // Check the length of the separator.
   3639   __ Cmp(separator_length, 1);
   3640   __ B(eq, &one_char_separator);
   3641   __ B(gt, &long_separator);
   3642 
   3643   // Empty separator case
   3644   __ Bind(&empty_separator_loop);
   3645   // Live values in registers:
   3646   //   result_pos: the position to which we are currently copying characters.
   3647   //   element: Current array element.
   3648   //   elements_end: Array end.
   3649 
   3650   // Copy next array element to the result.
   3651   __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
   3652   __ Ldrsw(string_length,
   3653            UntagSmiFieldMemOperand(string, String::kLengthOffset));
   3654   __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
   3655   __ CopyBytes(result_pos, string, string_length, scratch1);
   3656   __ Cmp(element, elements_end);
   3657   __ B(lt, &empty_separator_loop);  // End while (element < elements_end).
   3658   __ B(&done);
   3659 
   3660   // One-character separator case
   3661   __ Bind(&one_char_separator);
   3662   // Replace separator with its ASCII character value.
   3663   __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
   3664   // Jump into the loop after the code that copies the separator, so the first
    3665   // element is not preceded by a separator.
   3666   __ B(&one_char_separator_loop_entry);
   3667 
   3668   __ Bind(&one_char_separator_loop);
   3669   // Live values in registers:
   3670   //   result_pos: the position to which we are currently copying characters.
   3671   //   element: Current array element.
   3672   //   elements_end: Array end.
   3673   //   separator: Single separator ASCII char (in lower byte).
   3674 
   3675   // Copy the separator character to the result.
   3676   __ Strb(separator, MemOperand(result_pos, 1, PostIndex));
   3677 
   3678   // Copy next array element to the result.
   3679   __ Bind(&one_char_separator_loop_entry);
   3680   __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
   3681   __ Ldrsw(string_length,
   3682            UntagSmiFieldMemOperand(string, String::kLengthOffset));
   3683   __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
   3684   __ CopyBytes(result_pos, string, string_length, scratch1);
   3685   __ Cmp(element, elements_end);
   3686   __ B(lt, &one_char_separator_loop);  // End while (element < elements_end).
   3687   __ B(&done);
   3688 
   3689   // Long separator case (separator is more than one character). Entry is at the
   3690   // label long_separator below.
   3691   __ Bind(&long_separator_loop);
   3692   // Live values in registers:
   3693   //   result_pos: the position to which we are currently copying characters.
   3694   //   element: Current array element.
   3695   //   elements_end: Array end.
   3696   //   separator: Separator string.
   3697 
   3698   // Copy the separator to the result.
   3699   // TODO(all): hoist next two instructions.
   3700   __ Ldrsw(string_length,
   3701            UntagSmiFieldMemOperand(separator, String::kLengthOffset));
   3702   __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
   3703   __ CopyBytes(result_pos, string, string_length, scratch1);
   3704 
   3705   __ Bind(&long_separator);
   3706   __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
   3707   __ Ldrsw(string_length,
   3708            UntagSmiFieldMemOperand(string, String::kLengthOffset));
   3709   __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
   3710   __ CopyBytes(result_pos, string, string_length, scratch1);
   3711   __ Cmp(element, elements_end);
   3712   __ B(lt, &long_separator_loop);  // End while (element < elements_end).
   3713   __ B(&done);
   3714 
   3715   __ Bind(&bailout);
   3716   // Returning undefined will force slower code to handle it.
   3717   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
   3718   __ Bind(&done);
   3719   context()->Plug(result);
   3720 }
   3721 
   3722 
   3723 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
   3724   if (expr->function() != NULL &&
   3725       expr->function()->intrinsic_type == Runtime::INLINE) {
   3726     Comment cmnt(masm_, "[ InlineRuntimeCall");
   3727     EmitInlineRuntimeCall(expr);
   3728     return;
   3729   }
   3730 
    3731   Comment cmnt(masm_, "[ CallRuntime");
   3732   ZoneList<Expression*>* args = expr->arguments();
   3733   int arg_count = args->length();
   3734 
   3735   if (expr->is_jsruntime()) {
   3736     // Push the builtins object as the receiver.
   3737     __ Ldr(x10, GlobalObjectMemOperand());
   3738     __ Ldr(x0, FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
   3739     __ Push(x0);
   3740 
   3741     // Load the function from the receiver.
   3742     Handle<String> name = expr->name();
   3743     __ Mov(x2, Operand(name));
   3744     CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
   3745 
   3746     // Push the target function under the receiver.
   3747     __ Pop(x10);
   3748     __ Push(x0, x10);
   3749 
   3750     int arg_count = args->length();
   3751     for (int i = 0; i < arg_count; i++) {
   3752       VisitForStackValue(args->at(i));
   3753     }
   3754 
   3755     // Record source position of the IC call.
   3756     SetSourcePosition(expr->position());
   3757     CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
   3758     __ Peek(x1, (arg_count + 1) * kPointerSize);
   3759     __ CallStub(&stub);
   3760 
   3761     // Restore context register.
   3762     __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   3763 
   3764     context()->DropAndPlug(1, x0);
   3765   } else {
   3766     // Push the arguments ("left-to-right").
   3767     for (int i = 0; i < arg_count; i++) {
   3768       VisitForStackValue(args->at(i));
   3769     }
   3770 
   3771     // Call the C runtime function.
   3772     __ CallRuntime(expr->function(), arg_count);
   3773     context()->Plug(x0);
   3774   }
   3775 }
   3776 
   3777 
   3778 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
   3779   switch (expr->op()) {
   3780     case Token::DELETE: {
   3781       Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
   3782       Property* property = expr->expression()->AsProperty();
   3783       VariableProxy* proxy = expr->expression()->AsVariableProxy();
   3784 
   3785       if (property != NULL) {
   3786         VisitForStackValue(property->obj());
   3787         VisitForStackValue(property->key());
   3788         __ Mov(x10, Smi::FromInt(strict_mode()));
   3789         __ Push(x10);
   3790         __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
   3791         context()->Plug(x0);
   3792       } else if (proxy != NULL) {
   3793         Variable* var = proxy->var();
   3794         // Delete of an unqualified identifier is disallowed in strict mode
   3795         // but "delete this" is allowed.
   3796         ASSERT(strict_mode() == SLOPPY || var->is_this());
   3797         if (var->IsUnallocated()) {
   3798           __ Ldr(x12, GlobalObjectMemOperand());
   3799           __ Mov(x11, Operand(var->name()));
   3800           __ Mov(x10, Smi::FromInt(SLOPPY));
   3801           __ Push(x12, x11, x10);
   3802           __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
   3803           context()->Plug(x0);
   3804         } else if (var->IsStackAllocated() || var->IsContextSlot()) {
   3805           // Result of deleting non-global, non-dynamic variables is false.
   3806           // The subexpression does not have side effects.
   3807           context()->Plug(var->is_this());
   3808         } else {
   3809           // Non-global variable.  Call the runtime to try to delete from the
   3810           // context where the variable was introduced.
   3811           __ Mov(x2, Operand(var->name()));
   3812           __ Push(context_register(), x2);
   3813           __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
   3814           context()->Plug(x0);
   3815         }
   3816       } else {
   3817         // Result of deleting non-property, non-variable reference is true.
   3818         // The subexpression may have side effects.
   3819         VisitForEffect(expr->expression());
   3820         context()->Plug(true);
   3821       }
    3822       break;
   3824     }
   3825     case Token::VOID: {
   3826       Comment cmnt(masm_, "[ UnaryOperation (VOID)");
   3827       VisitForEffect(expr->expression());
   3828       context()->Plug(Heap::kUndefinedValueRootIndex);
   3829       break;
   3830     }
   3831     case Token::NOT: {
   3832       Comment cmnt(masm_, "[ UnaryOperation (NOT)");
   3833       if (context()->IsEffect()) {
   3834         // Unary NOT has no side effects so it's only necessary to visit the
   3835         // subexpression.  Match the optimizing compiler by not branching.
   3836         VisitForEffect(expr->expression());
   3837       } else if (context()->IsTest()) {
   3838         const TestContext* test = TestContext::cast(context());
   3839         // The labels are swapped for the recursive call.
   3840         VisitForControl(expr->expression(),
   3841                         test->false_label(),
   3842                         test->true_label(),
   3843                         test->fall_through());
   3844         context()->Plug(test->true_label(), test->false_label());
   3845       } else {
   3846         ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
   3847         // TODO(jbramley): This could be much more efficient using (for
   3848         // example) the CSEL instruction.
   3849         Label materialize_true, materialize_false, done;
   3850         VisitForControl(expr->expression(),
   3851                         &materialize_false,
   3852                         &materialize_true,
   3853                         &materialize_true);
   3854 
   3855         __ Bind(&materialize_true);
   3856         PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
   3857         __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
   3858         __ B(&done);
   3859 
   3860         __ Bind(&materialize_false);
   3861         PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
   3862         __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
   3863         __ B(&done);
   3864 
   3865         __ Bind(&done);
   3866         if (context()->IsStackValue()) {
   3867           __ Push(result_register());
   3868         }
   3869       }
   3870       break;
   3871     }
   3872     case Token::TYPEOF: {
   3873       Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
   3874       {
   3875         StackValueContext context(this);
   3876         VisitForTypeofValue(expr->expression());
   3877       }
   3878       __ CallRuntime(Runtime::kTypeof, 1);
   3879       context()->Plug(x0);
   3880       break;
   3881     }
   3882     default:
   3883       UNREACHABLE();
   3884   }
   3885 }
   3886 
   3887 
   3888 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
   3889   ASSERT(expr->expression()->IsValidReferenceExpression());
   3890 
   3891   Comment cmnt(masm_, "[ CountOperation");
   3892   SetSourcePosition(expr->position());
   3893 
   3894   // Expression can only be a property, a global or a (parameter or local)
   3895   // slot.
   3896   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
   3897   LhsKind assign_type = VARIABLE;
   3898   Property* prop = expr->expression()->AsProperty();
   3899   // In case of a property we use the uninitialized expression context
   3900   // of the key to detect a named property.
   3901   if (prop != NULL) {
   3902     assign_type =
   3903         (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
   3904   }
   3905 
   3906   // Evaluate expression and get value.
   3907   if (assign_type == VARIABLE) {
   3908     ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
   3909     AccumulatorValueContext context(this);
   3910     EmitVariableLoad(expr->expression()->AsVariableProxy());
   3911   } else {
   3912     // Reserve space for result of postfix operation.
   3913     if (expr->is_postfix() && !context()->IsEffect()) {
   3914       __ Push(xzr);
   3915     }
   3916     if (assign_type == NAMED_PROPERTY) {
   3917       // Put the object both on the stack and in the accumulator.
   3918       VisitForAccumulatorValue(prop->obj());
   3919       __ Push(x0);
   3920       EmitNamedPropertyLoad(prop);
   3921     } else {
   3922       // KEYED_PROPERTY
   3923       VisitForStackValue(prop->obj());
   3924       VisitForAccumulatorValue(prop->key());
   3925       __ Peek(x1, 0);
   3926       __ Push(x0);
   3927       EmitKeyedPropertyLoad(prop);
   3928     }
   3929   }
   3930 
   3931   // We need a second deoptimization point after loading the value
    3932   // in case evaluating the property load may have a side effect.
   3933   if (assign_type == VARIABLE) {
   3934     PrepareForBailout(expr->expression(), TOS_REG);
   3935   } else {
   3936     PrepareForBailoutForId(prop->LoadId(), TOS_REG);
   3937   }
   3938 
   3939   // Inline smi case if we are in a loop.
   3940   Label stub_call, done;
   3941   JumpPatchSite patch_site(masm_);
   3942 
   3943   int count_value = expr->op() == Token::INC ? 1 : -1;
   3944   if (ShouldInlineSmiCase(expr->op())) {
   3945     Label slow;
   3946     patch_site.EmitJumpIfNotSmi(x0, &slow);
   3947 
   3948     // Save result for postfix expressions.
   3949     if (expr->is_postfix()) {
   3950       if (!context()->IsEffect()) {
   3951         // Save the result on the stack. If we have a named or keyed property we
   3952         // store the result under the receiver that is currently on top of the
   3953         // stack.
   3954         switch (assign_type) {
   3955           case VARIABLE:
   3956             __ Push(x0);
   3957             break;
   3958           case NAMED_PROPERTY:
   3959             __ Poke(x0, kPointerSize);
   3960             break;
   3961           case KEYED_PROPERTY:
   3962             __ Poke(x0, kPointerSize * 2);
   3963             break;
   3964         }
   3965       }
   3966     }
   3967 
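           // Add the count as a tagged smi. Signed overflow (V set) means the result
           // does not fit in a smi, so undo the addition and fall back to the stub.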
   3968     __ Adds(x0, x0, Smi::FromInt(count_value));
   3969     __ B(vc, &done);
   3970     // Call stub. Undo operation first.
   3971     __ Sub(x0, x0, Smi::FromInt(count_value));
   3972     __ B(&stub_call);
   3973     __ Bind(&slow);
   3974   }
   3975   ToNumberStub convert_stub(isolate());
   3976   __ CallStub(&convert_stub);
   3977 
   3978   // Save result for postfix expressions.
   3979   if (expr->is_postfix()) {
   3980     if (!context()->IsEffect()) {
   3981       // Save the result on the stack. If we have a named or keyed property
   3982       // we store the result under the receiver that is currently on top
   3983       // of the stack.
   3984       switch (assign_type) {
   3985         case VARIABLE:
   3986           __ Push(x0);
   3987           break;
   3988         case NAMED_PROPERTY:
   3989           __ Poke(x0, kXRegSize);
   3990           break;
   3991         case KEYED_PROPERTY:
   3992           __ Poke(x0, 2 * kXRegSize);
   3993           break;
   3994       }
   3995     }
   3996   }
   3997 
   3998   __ Bind(&stub_call);
   3999   __ Mov(x1, x0);
   4000   __ Mov(x0, Smi::FromInt(count_value));
   4001 
   4002   // Record position before stub call.
   4003   SetSourcePosition(expr->position());
   4004 
   4005   {
   4006     Assembler::BlockPoolsScope scope(masm_);
   4007     BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE);
   4008     CallIC(stub.GetCode(), expr->CountBinOpFeedbackId());
   4009     patch_site.EmitPatchInfo();
   4010   }
   4011   __ Bind(&done);
   4012 
   4013   // Store the value returned in x0.
   4014   switch (assign_type) {
   4015     case VARIABLE:
   4016       if (expr->is_postfix()) {
   4017         { EffectContext context(this);
   4018           EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
   4019                                  Token::ASSIGN);
   4020           PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   4021           context.Plug(x0);
   4022         }
    4023         // For all contexts except EffectContext we have the result on
   4024         // top of the stack.
   4025         if (!context()->IsEffect()) {
   4026           context()->PlugTOS();
   4027         }
   4028       } else {
   4029         EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
   4030                                Token::ASSIGN);
   4031         PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   4032         context()->Plug(x0);
   4033       }
   4034       break;
   4035     case NAMED_PROPERTY: {
   4036       __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
   4037       __ Pop(x1);
   4038       CallStoreIC(expr->CountStoreFeedbackId());
   4039       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   4040       if (expr->is_postfix()) {
   4041         if (!context()->IsEffect()) {
   4042           context()->PlugTOS();
   4043         }
   4044       } else {
   4045         context()->Plug(x0);
   4046       }
   4047       break;
   4048     }
   4049     case KEYED_PROPERTY: {
   4050       __ Pop(x1);  // Key.
   4051       __ Pop(x2);  // Receiver.
   4052       Handle<Code> ic = strict_mode() == SLOPPY
   4053           ? isolate()->builtins()->KeyedStoreIC_Initialize()
   4054           : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
   4055       CallIC(ic, expr->CountStoreFeedbackId());
   4056       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   4057       if (expr->is_postfix()) {
   4058         if (!context()->IsEffect()) {
   4059           context()->PlugTOS();
   4060         }
   4061       } else {
   4062         context()->Plug(x0);
   4063       }
   4064       break;
   4065     }
   4066   }
   4067 }
   4068 
   4069 
   4070 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
   4071   ASSERT(!context()->IsEffect());
   4072   ASSERT(!context()->IsTest());
   4073   VariableProxy* proxy = expr->AsVariableProxy();
   4074   if (proxy != NULL && proxy->var()->IsUnallocated()) {
   4075     Comment cmnt(masm_, "Global variable");
   4076     __ Ldr(x0, GlobalObjectMemOperand());
   4077     __ Mov(x2, Operand(proxy->name()));
   4078     // Use a regular load, not a contextual load, to avoid a reference
   4079     // error.
   4080     CallLoadIC(NOT_CONTEXTUAL);
   4081     PrepareForBailout(expr, TOS_REG);
   4082     context()->Plug(x0);
   4083   } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
   4084     Label done, slow;
   4085 
   4086     // Generate code for loading from variables potentially shadowed
   4087     // by eval-introduced variables.
   4088     EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
   4089 
   4090     __ Bind(&slow);
   4091     __ Mov(x0, Operand(proxy->name()));
   4092     __ Push(cp, x0);
   4093     __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
   4094     PrepareForBailout(expr, TOS_REG);
   4095     __ Bind(&done);
   4096 
   4097     context()->Plug(x0);
   4098   } else {
   4099     // This expression cannot throw a reference error at the top level.
   4100     VisitInDuplicateContext(expr);
   4101   }
   4102 }
   4103 
   4104 
   4105 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
   4106                                                  Expression* sub_expr,
   4107                                                  Handle<String> check) {
   4108   ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
   4109   Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
   4110   Label materialize_true, materialize_false;
   4111   Label* if_true = NULL;
   4112   Label* if_false = NULL;
   4113   Label* fall_through = NULL;
   4114   context()->PrepareTest(&materialize_true, &materialize_false,
   4115                          &if_true, &if_false, &fall_through);
   4116 
   4117   { AccumulatorValueContext context(this);
   4118     VisitForTypeofValue(sub_expr);
   4119   }
   4120   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   4121 
   4122   Factory* factory = isolate()->factory();
   4123   if (String::Equals(check, factory->number_string())) {
   4124     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
   4125     __ JumpIfSmi(x0, if_true);
   4126     __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
   4127     __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
   4128     Split(eq, if_true, if_false, fall_through);
   4129   } else if (String::Equals(check, factory->string_string())) {
   4130     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
   4131     __ JumpIfSmi(x0, if_false);
   4132     // Check for undetectable objects => false.
   4133     __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
   4134     __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
   4135     __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
   4136                     fall_through);
   4137   } else if (String::Equals(check, factory->symbol_string())) {
   4138     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
   4139     __ JumpIfSmi(x0, if_false);
   4140     __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
   4141     Split(eq, if_true, if_false, fall_through);
   4142   } else if (String::Equals(check, factory->boolean_string())) {
   4143     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
   4144     __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
   4145     __ CompareRoot(x0, Heap::kFalseValueRootIndex);
   4146     Split(eq, if_true, if_false, fall_through);
   4147   } else if (FLAG_harmony_typeof &&
   4148              String::Equals(check, factory->null_string())) {
   4149     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof null_string");
   4150     __ CompareRoot(x0, Heap::kNullValueRootIndex);
   4151     Split(eq, if_true, if_false, fall_through);
   4152   } else if (String::Equals(check, factory->undefined_string())) {
   4153     ASM_LOCATION(
   4154         "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
   4155     __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
   4156     __ JumpIfSmi(x0, if_false);
   4157     // Check for undetectable objects => true.
   4158     __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
   4159     __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
   4160     __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
   4161                     fall_through);
   4162   } else if (String::Equals(check, factory->function_string())) {
   4163     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
   4164     __ JumpIfSmi(x0, if_false);
   4165     STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
   4166     __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
   4167     __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
   4168                        fall_through);
   4169 
   4170   } else if (String::Equals(check, factory->object_string())) {
   4171     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
   4172     __ JumpIfSmi(x0, if_false);
   4173     if (!FLAG_harmony_typeof) {
   4174       __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
   4175     }
   4176     // Check for JS objects => true.
   4177     Register map = x10;
   4178     __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
   4179                         if_false, lt);
   4180     __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
   4181     __ B(gt, if_false);
   4182     // Check for undetectable objects => false.
   4183     __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));
   4184 
   4185     __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
   4186                     fall_through);
   4187 
   4188   } else {
   4189     ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
   4190     if (if_false != fall_through) __ B(if_false);
   4191   }
   4192   context()->Plug(if_true, if_false);
   4193 }
   4194 
   4195 
   4196 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
   4197   Comment cmnt(masm_, "[ CompareOperation");
   4198   SetSourcePosition(expr->position());
   4199 
   4200   // Try to generate an optimized comparison with a literal value.
   4201   // TODO(jbramley): This only checks common values like NaN or undefined.
   4202   // Should it also handle ARM64 immediate operands?
   4203   if (TryLiteralCompare(expr)) {
   4204     return;
   4205   }
   4206 
   4207   // Assign labels according to context()->PrepareTest.
   4208   Label materialize_true;
   4209   Label materialize_false;
   4210   Label* if_true = NULL;
   4211   Label* if_false = NULL;
   4212   Label* fall_through = NULL;
   4213   context()->PrepareTest(&materialize_true, &materialize_false,
   4214                          &if_true, &if_false, &fall_through);
   4215 
   4216   Token::Value op = expr->op();
   4217   VisitForStackValue(expr->left());
   4218   switch (op) {
   4219     case Token::IN:
   4220       VisitForStackValue(expr->right());
   4221       __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
   4222       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
   4223       __ CompareRoot(x0, Heap::kTrueValueRootIndex);
   4224       Split(eq, if_true, if_false, fall_through);
   4225       break;
   4226 
   4227     case Token::INSTANCEOF: {
   4228       VisitForStackValue(expr->right());
   4229       InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
   4230       __ CallStub(&stub);
   4231       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   4232       // The stub returns 0 for true.
   4233       __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
   4234       break;
   4235     }
   4236 
   4237     default: {
   4238       VisitForAccumulatorValue(expr->right());
   4239       Condition cond = CompareIC::ComputeCondition(op);
   4240 
   4241       // Pop the stack value.
   4242       __ Pop(x1);
   4243 
   4244       JumpPatchSite patch_site(masm_);
   4245       if (ShouldInlineSmiCase(op)) {
   4246         Label slow_case;
   4247         patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
   4248         __ Cmp(x1, x0);
   4249         Split(cond, if_true, if_false, NULL);
   4250         __ Bind(&slow_case);
   4251       }
   4252 
   4253       // Record position and call the compare IC.
   4254       SetSourcePosition(expr->position());
   4255       Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
   4256       CallIC(ic, expr->CompareOperationFeedbackId());
   4257       patch_site.EmitPatchInfo();
   4258       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   4259       __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
   4260     }
   4261   }
   4262 
   4263   // Convert the result of the comparison into one expected for this
   4264   // expression's context.
   4265   context()->Plug(if_true, if_false);
   4266 }
   4267 
   4268 
   4269 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
   4270                                               Expression* sub_expr,
   4271                                               NilValue nil) {
   4272   ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
   4273   Label materialize_true, materialize_false;
   4274   Label* if_true = NULL;
   4275   Label* if_false = NULL;
   4276   Label* fall_through = NULL;
   4277   context()->PrepareTest(&materialize_true, &materialize_false,
   4278                          &if_true, &if_false, &fall_through);
   4279 
   4280   VisitForAccumulatorValue(sub_expr);
   4281   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   4282 
   4283   if (expr->op() == Token::EQ_STRICT) {
   4284     Heap::RootListIndex nil_value = nil == kNullValue ?
   4285         Heap::kNullValueRootIndex :
   4286         Heap::kUndefinedValueRootIndex;
   4287     __ CompareRoot(x0, nil_value);
   4288     Split(eq, if_true, if_false, fall_through);
   4289   } else {
   4290     Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
   4291     CallIC(ic, expr->CompareOperationFeedbackId());
   4292     __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
   4293   }
   4294 
   4295   context()->Plug(if_true, if_false);
   4296 }
   4297 
   4298 
   4299 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
   4300   __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   4301   context()->Plug(x0);
   4302 }
   4303 
   4304 
   4305 void FullCodeGenerator::VisitYield(Yield* expr) {
   4306   Comment cmnt(masm_, "[ Yield");
   4307   // Evaluate yielded value first; the initial iterator definition depends on
   4308   // this. It stays on the stack while we update the iterator.
   4309   VisitForStackValue(expr->expression());
   4310 
   4311   // TODO(jbramley): Tidy this up once the merge is done, using named registers
    4312   // and suchlike. The implementation changes a little in bleeding_edge, so I
   4313   // don't want to spend too much time on it now.
   4314 
   4315   switch (expr->yield_kind()) {
   4316     case Yield::SUSPEND:
   4317       // Pop value from top-of-stack slot; box result into result register.
   4318       EmitCreateIteratorResult(false);
   4319       __ Push(result_register());
   4320       // Fall through.
   4321     case Yield::INITIAL: {
   4322       Label suspend, continuation, post_runtime, resume;
   4323 
   4324       __ B(&suspend);
   4325 
   4326       // TODO(jbramley): This label is bound here because the following code
   4327       // looks at its pos(). Is it possible to do something more efficient here,
   4328       // perhaps using Adr?
   4329       __ Bind(&continuation);
   4330       __ B(&resume);
   4331 
   4332       __ Bind(&suspend);
   4333       VisitForAccumulatorValue(expr->generator_object());
   4334       ASSERT((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
   4335       __ Mov(x1, Smi::FromInt(continuation.pos()));
   4336       __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
   4337       __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
   4338       __ Mov(x1, cp);
   4339       __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
   4340                           kLRHasBeenSaved, kDontSaveFPRegs);
   4341       __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
   4342       __ Cmp(__ StackPointer(), x1);
   4343       __ B(eq, &post_runtime);
   4344       __ Push(x0);  // generator object
   4345       __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
   4346       __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   4347       __ Bind(&post_runtime);
   4348       __ Pop(result_register());
   4349       EmitReturnSequence();
   4350 
   4351       __ Bind(&resume);
   4352       context()->Plug(result_register());
   4353       break;
   4354     }
   4355 
   4356     case Yield::FINAL: {
   4357       VisitForAccumulatorValue(expr->generator_object());
   4358       __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
   4359       __ Str(x1, FieldMemOperand(result_register(),
   4360                                  JSGeneratorObject::kContinuationOffset));
   4361       // Pop value from top-of-stack slot, box result into result register.
   4362       EmitCreateIteratorResult(true);
   4363       EmitUnwindBeforeReturn();
   4364       EmitReturnSequence();
   4365       break;
   4366     }
   4367 
   4368     case Yield::DELEGATING: {
   4369       VisitForStackValue(expr->generator_object());
   4370 
   4371       // Initial stack layout is as follows:
   4372       // [sp + 1 * kPointerSize] iter
   4373       // [sp + 0 * kPointerSize] g
   4374 
   4375       Label l_catch, l_try, l_suspend, l_continuation, l_resume;
   4376       Label l_next, l_call, l_loop;
   4377       // Initial send value is undefined.
   4378       __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
   4379       __ B(&l_next);
   4380 
   4381       // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
   4382       __ Bind(&l_catch);
   4383       handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
   4384       __ LoadRoot(x2, Heap::kthrow_stringRootIndex);  // "throw"
   4385       __ Peek(x3, 1 * kPointerSize);                  // iter
   4386       __ Push(x2, x3, x0);                            // "throw", iter, except
   4387       __ B(&l_call);
   4388 
   4389       // try { received = %yield result }
   4390       // Shuffle the received result above a try handler and yield it without
   4391       // re-boxing.
   4392       __ Bind(&l_try);
   4393       __ Pop(x0);                                        // result
   4394       __ PushTryHandler(StackHandler::CATCH, expr->index());
   4395       const int handler_size = StackHandlerConstants::kSize;
   4396       __ Push(x0);                                       // result
   4397       __ B(&l_suspend);
   4398 
   4399       // TODO(jbramley): This label is bound here because the following code
   4400       // looks at its pos(). Is it possible to do something more efficient here,
   4401       // perhaps using Adr?
   4402       __ Bind(&l_continuation);
   4403       __ B(&l_resume);
   4404 
   4405       __ Bind(&l_suspend);
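               // Stack layout here: [sp] result, then the try handler, then the
               // generator object g.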
   4406       const int generator_object_depth = kPointerSize + handler_size;
   4407       __ Peek(x0, generator_object_depth);
   4408       __ Push(x0);                                       // g
   4409       ASSERT((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
   4410       __ Mov(x1, Smi::FromInt(l_continuation.pos()));
   4411       __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
   4412       __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
   4413       __ Mov(x1, cp);
   4414       __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
   4415                           kLRHasBeenSaved, kDontSaveFPRegs);
   4416       __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
   4417       __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   4418       __ Pop(x0);                                        // result
   4419       EmitReturnSequence();
   4420       __ Bind(&l_resume);                                // received in x0
   4421       __ PopTryHandler();
   4422 
   4423       // receiver = iter; f = 'next'; arg = received;
   4424       __ Bind(&l_next);
   4425       __ LoadRoot(x2, Heap::knext_stringRootIndex);  // "next"
   4426       __ Peek(x3, 1 * kPointerSize);                 // iter
   4427       __ Push(x2, x3, x0);                           // "next", iter, received
   4428 
   4429       // result = receiver[f](arg);
   4430       __ Bind(&l_call);
   4431       __ Peek(x1, 1 * kPointerSize);
   4432       __ Peek(x0, 2 * kPointerSize);
   4433       Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
   4434       CallIC(ic, TypeFeedbackId::None());
   4435       __ Mov(x1, x0);
   4436       __ Poke(x1, 2 * kPointerSize);
   4437       CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
   4438       __ CallStub(&stub);
   4439 
   4440       __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   4441       __ Drop(1);  // The function is still on the stack; drop it.
   4442 
   4443       // if (!result.done) goto l_try;
   4444       __ Bind(&l_loop);
   4445       __ Push(x0);                                       // save result
   4446       __ LoadRoot(x2, Heap::kdone_stringRootIndex);      // "done"
   4447       CallLoadIC(NOT_CONTEXTUAL);                        // result.done in x0
   4448       // The ToBooleanStub argument (result.done) is in x0.
   4449       Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
   4450       CallIC(bool_ic);
   4451       __ Cbz(x0, &l_try);
   4452 
   4453       // result.value
   4454       __ Pop(x0);                                        // result
   4455       __ LoadRoot(x2, Heap::kvalue_stringRootIndex);     // "value"
   4456       CallLoadIC(NOT_CONTEXTUAL);                        // result.value in x0
   4457       context()->DropAndPlug(2, x0);                     // drop iter and g
   4458       break;
   4459     }
   4460   }
   4461 }
   4462 
   4463 
   4464 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
   4465     Expression *value,
   4466     JSGeneratorObject::ResumeMode resume_mode) {
   4467   ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
   4468   Register value_reg = x0;
   4469   Register generator_object = x1;
   4470   Register the_hole = x2;
   4471   Register operand_stack_size = w3;
   4472   Register function = x4;
   4473 
   4474   // The value stays in x0, and is ultimately read by the resumed generator, as
   4475   // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
    4476   // is read to throw the value when the resumed generator is already closed. x1
   4477   // will hold the generator object until the activation has been resumed.
   4478   VisitForStackValue(generator);
   4479   VisitForAccumulatorValue(value);
   4480   __ Pop(generator_object);
   4481 
   4482   // Check generator state.
   4483   Label wrong_state, closed_state, done;
   4484   __ Ldr(x10, FieldMemOperand(generator_object,
   4485                               JSGeneratorObject::kContinuationOffset));
   4486   STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
   4487   STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
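           // A continuation of zero means the generator is closed, a negative value
           // means it is already executing, and a positive smi gives the code offset
           // at which a suspended generator resumes.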
   4488   __ CompareAndBranch(x10, Smi::FromInt(0), eq, &closed_state);
   4489   __ CompareAndBranch(x10, Smi::FromInt(0), lt, &wrong_state);
   4490 
   4491   // Load suspended function and context.
   4492   __ Ldr(cp, FieldMemOperand(generator_object,
   4493                              JSGeneratorObject::kContextOffset));
   4494   __ Ldr(function, FieldMemOperand(generator_object,
   4495                                    JSGeneratorObject::kFunctionOffset));
   4496 
   4497   // Load receiver and store as the first argument.
   4498   __ Ldr(x10, FieldMemOperand(generator_object,
   4499                               JSGeneratorObject::kReceiverOffset));
   4500   __ Push(x10);
   4501 
   4502   // Push holes for the rest of the arguments to the generator function.
   4503   __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
   4504 
    4505   // The number of arguments is stored as an int32_t, where -1 is a marker
    4506   // (SharedFunctionInfo::kDontAdaptArgumentsSentinel) that would normally need
    4507   // sign extension to handle correctly. Here we operate on a 32-bit W register,
    4508   // so no extension is required.
   4509   __ Ldr(w10, FieldMemOperand(x10,
   4510                               SharedFunctionInfo::kFormalParameterCountOffset));
   4511   __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
   4512   __ PushMultipleTimes(the_hole, w10);
   4513 
   4514   // Enter a new JavaScript frame, and initialize its slots as they were when
   4515   // the generator was suspended.
   4516   Label resume_frame;
   4517   __ Bl(&resume_frame);
   4518   __ B(&done);
   4519 
   4520   __ Bind(&resume_frame);
   4521   __ Push(lr,           // Return address.
   4522           fp,           // Caller's frame pointer.
   4523           cp,           // Callee's context.
   4524           function);    // Callee's JS Function.
   4525   __ Add(fp, __ StackPointer(), kPointerSize * 2);
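           // fp now points at the saved caller fp, as in a standard JS frame: lr is
           // above it, cp and the callee function are below it.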
   4526 
   4527   // Load and untag the operand stack size.
   4528   __ Ldr(x10, FieldMemOperand(generator_object,
   4529                               JSGeneratorObject::kOperandStackOffset));
   4530   __ Ldr(operand_stack_size,
   4531          UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
   4532 
   4533   // If we are sending a value and there is no operand stack, we can jump back
   4534   // in directly.
   4535   if (resume_mode == JSGeneratorObject::NEXT) {
   4536     Label slow_resume;
   4537     __ Cbnz(operand_stack_size, &slow_resume);
   4538     __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
   4539     __ Ldrsw(x11,
   4540              UntagSmiFieldMemOperand(generator_object,
   4541                                      JSGeneratorObject::kContinuationOffset));
   4542     __ Add(x10, x10, x11);
   4543     __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
   4544     __ Str(x12, FieldMemOperand(generator_object,
   4545                                 JSGeneratorObject::kContinuationOffset));
   4546     __ Br(x10);
   4547 
   4548     __ Bind(&slow_resume);
   4549   }
   4550 
   4551   // Otherwise, we push holes for the operand stack and call the runtime to fix
   4552   // up the stack and the handlers.
   4553   __ PushMultipleTimes(the_hole, operand_stack_size);
   4554 
   4555   __ Mov(x10, Smi::FromInt(resume_mode));
   4556   __ Push(generator_object, result_register(), x10);
   4557   __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
   4558   // Not reached: the runtime call returns elsewhere.
   4559   __ Unreachable();
   4560 
   4561   // Reach here when generator is closed.
   4562   __ Bind(&closed_state);
   4563   if (resume_mode == JSGeneratorObject::NEXT) {
   4564     // Return completed iterator result when generator is closed.
   4565     __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
   4566     __ Push(x10);
   4567     // Pop value from top-of-stack slot; box result into result register.
   4568     EmitCreateIteratorResult(true);
   4569   } else {
   4570     // Throw the provided value.
   4571     __ Push(value_reg);
   4572     __ CallRuntime(Runtime::kHiddenThrow, 1);
   4573   }
   4574   __ B(&done);
   4575 
   4576   // Throw error if we attempt to operate on a running generator.
   4577   __ Bind(&wrong_state);
   4578   __ Push(generator_object);
   4579   __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);
   4580 
   4581   __ Bind(&done);
   4582   context()->Plug(result_register());
   4583 }
   4584 
   4585 
   4586 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
   4587   Label gc_required;
   4588   Label allocated;
   4589 
   4590   Handle<Map> map(isolate()->native_context()->iterator_result_map());
   4591 
   4592   // Allocate and populate an object with this form: { value: VAL, done: DONE }
   4593 
   4594   Register result = x0;
   4595   __ Allocate(map->instance_size(), result, x10, x11, &gc_required, TAG_OBJECT);
   4596   __ B(&allocated);
   4597 
   4598   __ Bind(&gc_required);
   4599   __ Push(Smi::FromInt(map->instance_size()));
   4600   __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
   4601   __ Ldr(context_register(),
   4602          MemOperand(fp, StandardFrameConstants::kContextOffset));
   4603 
   4604   __ Bind(&allocated);
   4605   Register map_reg = x1;
   4606   Register result_value = x2;
   4607   Register boolean_done = x3;
   4608   Register empty_fixed_array = x4;
   4609   Register untagged_result = x5;
   4610   __ Mov(map_reg, Operand(map));
   4611   __ Pop(result_value);
   4612   __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
   4613   __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
   4614   ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
   4615   STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
   4616                 JSObject::kElementsOffset);
   4617   STATIC_ASSERT(JSGeneratorObject::kResultValuePropertyOffset + kPointerSize ==
   4618                 JSGeneratorObject::kResultDonePropertyOffset);
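           // Untag the result so the stores below can use aligned offsets; the Stp
           // pairs rely on the adjacent field offsets asserted above.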
   4619   __ ObjectUntag(untagged_result, result);
   4620   __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
   4621   __ Stp(empty_fixed_array, empty_fixed_array,
   4622          MemOperand(untagged_result, JSObject::kPropertiesOffset));
   4623   __ Stp(result_value, boolean_done,
   4624          MemOperand(untagged_result,
   4625                     JSGeneratorObject::kResultValuePropertyOffset));
   4626 
   4627   // Only the value field needs a write barrier, as the other values are in the
   4628   // root set.
   4629   __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
   4630                       x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
   4631 }
   4632 
   4633 
   4634 // TODO(all): I don't like this method.
   4635 // It seems to me that in too many places x0 is used in place of this.
   4636 // Also, this function is not suitable for all places where x0 should be
    4637 // abstracted (e.g. when used as an argument). But some places assume that the
   4638 // first argument register is x0, and use this function instead.
   4639 // Considering that most of the register allocation is hard-coded in the
   4640 // FullCodeGen, that it is unlikely we will need to change it extensively, and
   4641 // that abstracting the allocation through functions would not yield any
   4642 // performance benefit, I think the existence of this function is debatable.
   4643 Register FullCodeGenerator::result_register() {
   4644   return x0;
   4645 }
   4646 
   4647 
   4648 Register FullCodeGenerator::context_register() {
   4649   return cp;
   4650 }
   4651 
   4652 
   4653 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
   4654   ASSERT(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
   4655   __ Str(value, MemOperand(fp, frame_offset));
   4656 }
   4657 
   4658 
   4659 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
   4660   __ Ldr(dst, ContextMemOperand(cp, context_index));
   4661 }
   4662 
   4663 
   4664 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
   4665   Scope* declaration_scope = scope()->DeclarationScope();
   4666   if (declaration_scope->is_global_scope() ||
   4667       declaration_scope->is_module_scope()) {
   4668     // Contexts nested in the native context have a canonical empty function
   4669     // as their closure, not the anonymous closure containing the global
   4670     // code.  Pass a smi sentinel and let the runtime look up the empty
   4671     // function.
   4672     ASSERT(kSmiTag == 0);
   4673     __ Push(xzr);
   4674   } else if (declaration_scope->is_eval_scope()) {
   4675     // Contexts created by a call to eval have the same closure as the
   4676     // context calling eval, not the anonymous closure containing the eval
   4677     // code.  Fetch it from the context.
   4678     __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
   4679     __ Push(x10);
   4680   } else {
   4681     ASSERT(declaration_scope->is_function_scope());
   4682     __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   4683     __ Push(x10);
   4684   }
   4685 }
   4686 
   4687 
   4688 void FullCodeGenerator::EnterFinallyBlock() {
   4689   ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
   4690   ASSERT(!result_register().is(x10));
   4691   // Preserve the result register while executing finally block.
    4692   // Also cook the return address in lr and push it (smi-encoded Code* delta).
   4693   __ Sub(x10, lr, Operand(masm_->CodeObject()));
   4694   __ SmiTag(x10);
   4695   __ Push(result_register(), x10);
   4696 
   4697   // Store pending message while executing finally block.
   4698   ExternalReference pending_message_obj =
   4699       ExternalReference::address_of_pending_message_obj(isolate());
   4700   __ Mov(x10, pending_message_obj);
   4701   __ Ldr(x10, MemOperand(x10));
   4702 
   4703   ExternalReference has_pending_message =
   4704       ExternalReference::address_of_has_pending_message(isolate());
   4705   STATIC_ASSERT(sizeof(bool) == 1);   // NOLINT(runtime/sizeof)
   4706   __ Mov(x11, has_pending_message);
   4707   __ Ldrb(x11, MemOperand(x11));
   4708   __ SmiTag(x11);
   4709 
   4710   __ Push(x10, x11);
   4711 
   4712   ExternalReference pending_message_script =
   4713       ExternalReference::address_of_pending_message_script(isolate());
   4714   __ Mov(x10, pending_message_script);
   4715   __ Ldr(x10, MemOperand(x10));
   4716   __ Push(x10);
   4717 }
   4718 
   4719 
   4720 void FullCodeGenerator::ExitFinallyBlock() {
   4721   ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
   4722   ASSERT(!result_register().is(x10));
   4723 
   4724   // Restore pending message from stack.
   4725   __ Pop(x10, x11, x12);
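           // x10 = pending message script, x11 = has-pending-message flag (smi),
           // x12 = pending message object, matching the push order in
           // EnterFinallyBlock.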
   4726   ExternalReference pending_message_script =
   4727       ExternalReference::address_of_pending_message_script(isolate());
   4728   __ Mov(x13, pending_message_script);
   4729   __ Str(x10, MemOperand(x13));
   4730 
   4731   __ SmiUntag(x11);
   4732   ExternalReference has_pending_message =
   4733       ExternalReference::address_of_has_pending_message(isolate());
   4734   __ Mov(x13, has_pending_message);
   4735   STATIC_ASSERT(sizeof(bool) == 1);   // NOLINT(runtime/sizeof)
   4736   __ Strb(x11, MemOperand(x13));
   4737 
   4738   ExternalReference pending_message_obj =
   4739       ExternalReference::address_of_pending_message_obj(isolate());
   4740   __ Mov(x13, pending_message_obj);
   4741   __ Str(x12, MemOperand(x13));
   4742 
   4743   // Restore result register and cooked return address from the stack.
   4744   __ Pop(x10, result_register());
   4745 
   4746   // Uncook the return address (see EnterFinallyBlock).
   4747   __ SmiUntag(x10);
   4748   __ Add(x11, x10, Operand(masm_->CodeObject()));
   4749   __ Br(x11);
   4750 }
   4751 
   4752 
   4753 #undef __
   4754 
   4755 
   4756 void BackEdgeTable::PatchAt(Code* unoptimized_code,
   4757                             Address pc,
   4758                             BackEdgeState target_state,
   4759                             Code* replacement_code) {
    4760   // Locate the branch (or nop) to patch: three instructions before pc.
   4761   Address branch_address = pc - 3 * kInstructionSize;
   4762   PatchingAssembler patcher(branch_address, 1);
   4763 
   4764   ASSERT(Instruction::Cast(branch_address)
   4765              ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
   4766          (Instruction::Cast(branch_address)->IsCondBranchImm() &&
   4767           Instruction::Cast(branch_address)->ImmPCOffset() ==
   4768               6 * kInstructionSize));
   4769 
   4770   switch (target_state) {
   4771     case INTERRUPT:
   4772       //  <decrement profiling counter>
   4773       //  .. .. .. ..       b.pl ok
   4774       //  .. .. .. ..       ldr x16, pc+<interrupt stub address>
   4775       //  .. .. .. ..       blr x16
   4776       //  ... more instructions.
   4777       //  ok-label
   4778       // Jump offset is 6 instructions.
   4779       patcher.b(6, pl);
   4780       break;
   4781     case ON_STACK_REPLACEMENT:
   4782     case OSR_AFTER_STACK_CHECK:
   4783       //  <decrement profiling counter>
   4784       //  .. .. .. ..       mov x0, x0 (NOP)
   4785       //  .. .. .. ..       ldr x16, pc+<on-stack replacement address>
   4786       //  .. .. .. ..       blr x16
   4787       patcher.nop(Assembler::INTERRUPT_CODE_NOP);
   4788       break;
   4789   }
   4790 
   4791   // Replace the call address.
   4792   Instruction* load = Instruction::Cast(pc)->preceding(2);
   4793   Address interrupt_address_pointer =
   4794       reinterpret_cast<Address>(load) + load->ImmPCOffset();
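           // The ldr two instructions before pc is a pc-relative literal load;
           // ImmPCOffset() is the offset from that instruction to the 64-bit slot
           // holding the call target.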
   4795   ASSERT((Memory::uint64_at(interrupt_address_pointer) ==
   4796           reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
   4797                                          ->builtins()
   4798                                          ->OnStackReplacement()
   4799                                          ->entry())) ||
   4800          (Memory::uint64_at(interrupt_address_pointer) ==
   4801           reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
   4802                                          ->builtins()
   4803                                          ->InterruptCheck()
   4804                                          ->entry())) ||
   4805          (Memory::uint64_at(interrupt_address_pointer) ==
   4806           reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
   4807                                          ->builtins()
   4808                                          ->OsrAfterStackCheck()
    4809                                          ->entry())));
   4815   Memory::uint64_at(interrupt_address_pointer) =
   4816       reinterpret_cast<uint64_t>(replacement_code->entry());
   4817 
   4818   unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
   4819       unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
   4820 }
   4821 
   4822 
   4823 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
   4824     Isolate* isolate,
   4825     Code* unoptimized_code,
   4826     Address pc) {
   4827   // TODO(jbramley): There should be some extra assertions here (as in the ARM
   4828   // back-end), but this function is gone in bleeding_edge so it might not
   4829   // matter anyway.
   4830   Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
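           // pc is the back edge's return address; three instructions before it is
           // either the patched conditional branch (INTERRUPT) or the nop emitted
           // for the OSR states.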
   4831 
   4832   if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
   4833     Instruction* load = Instruction::Cast(pc)->preceding(2);
   4834     uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
   4835                                        load->ImmPCOffset());
   4836     if (entry == reinterpret_cast<uint64_t>(
   4837         isolate->builtins()->OnStackReplacement()->entry())) {
   4838       return ON_STACK_REPLACEMENT;
   4839     } else if (entry == reinterpret_cast<uint64_t>(
   4840         isolate->builtins()->OsrAfterStackCheck()->entry())) {
   4841       return OSR_AFTER_STACK_CHECK;
   4842     } else {
   4843       UNREACHABLE();
   4844     }
   4845   }
   4846 
   4847   return INTERRUPT;
   4848 }
   4849 
   4850 
   4851 #define __ ACCESS_MASM(masm())
   4852 
   4853 
   4854 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
   4855     int* stack_depth,
   4856     int* context_length) {
   4857   ASM_LOCATION("FullCodeGenerator::TryFinally::Exit");
   4858   // The macros used here must preserve the result register.
   4859 
   4860   // Because the handler block contains the context of the finally
   4861   // code, we can restore it directly from there for the finally code
   4862   // rather than iteratively unwinding contexts via their previous
   4863   // links.
   4864   __ Drop(*stack_depth);  // Down to the handler block.
   4865   if (*context_length > 0) {
   4866     // Restore the context to its dedicated register and the stack.
   4867     __ Peek(cp, StackHandlerConstants::kContextOffset);
   4868     __ Str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   4869   }
   4870   __ PopTryHandler();
   4871   __ Bl(finally_entry_);
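           // The finally block entered by the Bl above returns here by uncooking the
           // lr saved in EnterFinallyBlock.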
   4872 
   4873   *stack_depth = 0;
   4874   *context_length = 0;
   4875   return previous_;
   4876 }
   4877 
   4878 
   4879 #undef __
   4880 
   4881 
   4882 } }  // namespace v8::internal
   4883 
   4884 #endif  // V8_TARGET_ARCH_ARM64
   4885