// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_ARM

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

#include "arm/code-stubs-arm.h"
#include "arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code that can be patched. This class
// has methods to emit the patchable code and a method, EmitPatchInfo, to
// record a marker back to the patchable code. The marker is a
// cmp rx, #yyy instruction, where x * 0x00000fff + yyy (the raw 12-bit
// immediate value) is the delta from the pc to the first instruction of
// the patchable code.
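// For illustration (an example added here, not part of the original
// comment): a delta of 5000 instructions would be recorded as
// 'cmp r1, #905', since 5000 == 1 * 0x00000fff + 905, and a patcher can
// recover the delta as code(rx) * 0x00000fff + yyy.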
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // When this is initially emitted, ensure that the jump is always taken,
  // skipping the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When this is initially emitted, ensure that the jump is never taken,
  // so the inlined smi code is not skipped.
  void EmitJumpIfSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
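//
// A rough sketch of that frame as built by the prologue below (an
// illustrative summary, assuming the standard layout in frames-arm.h):
//   fp + 8 : caller SP (receiver and parameters live above this)
//   fp + 4 : saved lr (return address)
//   fp + 0 : saved fp (caller's frame pointer)
//   fp - 4 : context (cp)
//   fp - 8 : JS function (r1)
//   fp - 12: first stack-allocated local, further locals below.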
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). r5 is zero for method calls and non-zero for
  // function calls.
  if (!info->is_classic_mode() || info->is_native()) {
    Label ok;
    __ cmp(r5, Operand::Zero());
    __ b(eq, &ok);
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ str(r2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  {
    PredictableCodeSizeScope predictable_code_size_scope(
        masm_, kNoCodeAgeSequenceLength * Assembler::kInstrSize);
    // The following three instructions must remain together and unmodified
    // for code aging to work properly.
    __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
    __ nop(ip.code());
    // Adjust FP to point to saved FP.
    __ add(fp, sp, Operand(2 * kPointerSize));
  }
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < locals_count; i++) {
        __ push(ip);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    __ push(r1);
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in both r0 and cp.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in cp.
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        __ RecordWriteContextSlot(
            cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(r3, r1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ add(r2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r1, Operand(Smi::FromInt(num_parameters)));
    __ Push(r3, r2, r1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_HARMONY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, &ok);
      PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
      StackCheckStub stub;
      __ CallStub(&stub);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}
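// Note (added remark, not in the original source): the SetCC variant of
// 'sub' above updates the condition flags, so callers can branch on 'pl'
// immediately after the decrement, without a separate compare, to detect
// whether the counter is still non-negative.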


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
    // Self-optimization is a one-off thing: if it fails, don't try again.
    reset_value = Smi::kMaxValue;
  }
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r2, Operand(profiling_counter_));
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  int weight = 1;
  if (FLAG_weighted_back_edges) {
    ASSERT(back_edge_target->is_bound());
    int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
    weight = Min(kMaxBackEdgeWeight,
                 Max(1, distance / kCodeSizeMultiplier));
  }
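  // For illustration (hypothetical numbers, not from the original source):
  // if the loop body spanned 1200 bytes and kCodeSizeMultiplier were 100,
  // the weight would be Min(kMaxBackEdgeWeight, Max(1, 12)), i.e. 12 when
  // kMaxBackEdgeWeight allows it, so larger loops drain the counter faster.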
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  InterruptStub stub;
  __ CallStub(&stub);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      // Pretend that the exit is a backwards jump to the entry.
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kCodeSizeMultiplier));
      }
      EmitProfilingCounterDecrement(weight);
      Label ok;
      __ b(pl, &ok);
      __ push(r0);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
        __ push(r2);
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      } else {
        InterruptStub stub;
        __ CallStub(&stub);
      }
      __ pop(r0);
      EmitProfilingCounterReset();
      __ bind(&ok);
    }

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting, as we rely on the code size here.
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
      masm_->add(sp, sp, Operand(sp_delta));
      masm_->Jump(lr);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ push(ip);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ push(ip);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, RelocInfo::CODE_TARGET, condition->test_id());
  __ tst(result_register(), result_register());
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}
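// For illustration (added example, not in the original source):
// Split(eq, &t, &f, &f) emits only 'b eq, &t' and falls through to f;
// Split(eq, &t, &f, &t) emits only 'b ne, &f'; with a distinct
// fall-through label both branches are emitted.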


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
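// A worked example (illustrative; it assumes parameter i has index i and
// kLocal0Offset is -3 * kPointerSize): with two parameters, parameter 0
// lives at fp + 12 and parameter 1 at fp + 8, just below the receiver,
// while local 0 lives at fp - 12, local 1 at fp - 16, and so on.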


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ mov(r1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r2, r1, r0);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
        __ Push(cp, r2, r1, r0);
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ mov(r1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, r2, r1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ ldr(r1, ContextOperand(r1, variable->interface()->Index()));
  __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));

  // Assign it.
  __ str(r1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            r1,
                            r3,
                            kLRHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test; remember it as the final fall-through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(r0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
  __ b(le, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<Cell> cell = isolate()->factory()->NewCell(
      Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
                     isolate()));
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
  __ LoadHeapObject(r1, cell);
  __ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
  __ str(r2, FieldMemOperand(r1, Cell::kValueOffset));

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
  __ b(gt, &non_proxy);
  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.
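
  // At this point the for-in state occupies five stack slots (a summary
  // added here, implied by the pushes above and the Drop(5) at the end):
  //   sp + 0  : current index (smi)
  //   sp + 4  : array length (smi)
  //   sp + 8  : enum cache array, or fixed array of names
  //   sp + 12 : expected map, or a smi in the permanent slow case
  //   sp + 16 : the enumerable object itself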

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ cmp(r2, Operand(Smi::FromInt(0)));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(r3, Operand(r0), SetCC);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
   1240   // the index (smi) stored on top of the stack.
   1241   __ bind(loop_statement.continue_label());
   1242   __ pop(r0);
   1243   __ add(r0, r0, Operand(Smi::FromInt(1)));
   1244   __ push(r0);
   1245 
   1246   EmitBackEdgeBookkeeping(stmt, &loop);
   1247   __ b(&loop);
   1248 
   1249   // Remove the pointers stored on the stack.
   1250   __ bind(loop_statement.break_label());
   1251   __ Drop(5);
   1252 
   1253   // Exit and decrement the loop depth.
   1254   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
   1255   __ bind(&exit);
   1256   decrement_loop_depth();
   1257 }
   1258 
   1259 
   1260 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
   1261   Comment cmnt(masm_, "[ ForOfStatement");
   1262   SetStatementPosition(stmt);
   1263 
   1264   Iteration loop_statement(this, stmt);
   1265   increment_loop_depth();
   1266 
   1267   // var iterator = iterable[@@iterator]()
   1268   VisitForAccumulatorValue(stmt->assign_iterator());
   1269 
   1270   // As with for-in, skip the loop if the iterator is null or undefined.
   1271   __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
   1272   __ b(eq, loop_statement.break_label());
   1273   __ CompareRoot(r0, Heap::kNullValueRootIndex);
   1274   __ b(eq, loop_statement.break_label());
   1275 
   1276   // Convert the iterator to a JS object.
   1277   Label convert, done_convert;
   1278   __ JumpIfSmi(r0, &convert);
   1279   __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
   1280   __ b(ge, &done_convert);
   1281   __ bind(&convert);
   1282   __ push(r0);
   1283   __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
   1284   __ bind(&done_convert);
   1285   __ push(r0);
   1286 
   1287   // Loop entry.
   1288   __ bind(loop_statement.continue_label());
   1289 
   1290   // result = iterator.next()
   1291   VisitForEffect(stmt->next_result());
   1292 
   1293   // if (result.done) break;
   1294   Label result_not_done;
   1295   VisitForControl(stmt->result_done(),
   1296                   loop_statement.break_label(),
   1297                   &result_not_done,
   1298                   &result_not_done);
   1299   __ bind(&result_not_done);
   1300 
   1301   // each = result.value
   1302   VisitForEffect(stmt->assign_each());
   1303 
   1304   // Generate code for the body of the loop.
   1305   Visit(stmt->body());
   1306 
   1307   // Check stack before looping.
   1308   PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
   1309   EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
   1310   __ jmp(loop_statement.continue_label());
   1311 
   1312   // Exit and decrement the loop depth.
   1313   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
   1314   __ bind(loop_statement.break_label());
   1315   decrement_loop_depth();
   1316 }
   1317 
   1318 
   1319 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
   1320                                        bool pretenure) {
   1321   // Use the fast case closure allocation code that allocates in new
   1322   // space for nested functions that don't need literals cloning. If
   1323   // we're running with the --always-opt or the --prepare-always-opt
   1324   // flag, we need to use the runtime function so that the new function
   1325   // we are creating here gets a chance to have its code optimized and
   1326   // doesn't just get a copy of the existing unoptimized code.
   1327   if (!FLAG_always_opt &&
   1328       !FLAG_prepare_always_opt &&
   1329       !pretenure &&
   1330       scope()->is_function_scope() &&
   1331       info->num_literals() == 0) {
   1332     FastNewClosureStub stub(info->language_mode(), info->is_generator());
   1333     __ mov(r0, Operand(info));
   1334     __ push(r0);
   1335     __ CallStub(&stub);
   1336   } else {
   1337     __ mov(r0, Operand(info));
   1338     __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
   1339                               : Heap::kFalseValueRootIndex);
   1340     __ Push(cp, r0, r1);
   1341     __ CallRuntime(Runtime::kNewClosure, 3);
   1342   }
   1343   context()->Plug(r0);
   1344 }
   1345 
   1346 
   1347 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
   1348   Comment cmnt(masm_, "[ VariableProxy");
   1349   EmitVariableLoad(expr);
   1350 }
   1351 
   1352 
   1353 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
   1354                                                       TypeofState typeof_state,
   1355                                                       Label* slow) {
   1356   Register current = cp;
   1357   Register next = r1;
   1358   Register temp = r2;
   1359 
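  // Walk the context chain from the current scope outwards. A context
  // whose scope contains a call to non-strict eval may have acquired an
  // extension object at runtime (e.g. through eval('var x = 1') in an
  // enclosing function); if one is present, the slow path must be taken.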
   1360   Scope* s = scope();
   1361   while (s != NULL) {
   1362     if (s->num_heap_slots() > 0) {
   1363       if (s->calls_non_strict_eval()) {
   1364         // Check that extension is NULL.
   1365         __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
   1366         __ tst(temp, temp);
   1367         __ b(ne, slow);
   1368       }
   1369       // Load next context in chain.
   1370       __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
   1371       // Walk the rest of the chain without clobbering cp.
   1372       current = next;
   1373     }
   1374     // If no outer scope calls eval, we do not need to check more
   1375     // context extensions.
   1376     if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
   1377     s = s->outer_scope();
   1378   }
   1379 
   1380   if (s->is_eval_scope()) {
   1381     Label loop, fast;
   1382     if (!current.is(next)) {
   1383       __ Move(next, current);
   1384     }
   1385     __ bind(&loop);
   1386     // Terminate at native context.
   1387     __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
   1388     __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
   1389     __ cmp(temp, ip);
   1390     __ b(eq, &fast);
   1391     // Check that extension is NULL.
   1392     __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
   1393     __ tst(temp, temp);
   1394     __ b(ne, slow);
   1395     // Load next context in chain.
   1396     __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
   1397     __ b(&loop);
   1398     __ bind(&fast);
   1399   }
   1400 
   1401   __ ldr(r0, GlobalObjectOperand());
   1402   __ mov(r2, Operand(var->name()));
   1403   RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
   1404       ? RelocInfo::CODE_TARGET
   1405       : RelocInfo::CODE_TARGET_CONTEXT;
   1406   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   1407   CallIC(ic, mode);
   1408 }
   1409 
   1410 
   1411 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
   1412                                                                 Label* slow) {
   1413   ASSERT(var->IsContextSlot());
   1414   Register context = cp;
   1415   Register next = r3;
   1416   Register temp = r4;
   1417 
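  // Walk up from the current scope to the scope holding the variable,
  // verifying along the way that no intervening eval has installed a
  // context extension that could shadow the slot.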
   1418   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
   1419     if (s->num_heap_slots() > 0) {
   1420       if (s->calls_non_strict_eval()) {
   1421         // Check that extension is NULL.
   1422         __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
   1423         __ tst(temp, temp);
   1424         __ b(ne, slow);
   1425       }
   1426       __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
   1427       // Walk the rest of the chain without clobbering cp.
   1428       context = next;
   1429     }
   1430   }
   1431   // Check that last extension is NULL.
   1432   __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
   1433   __ tst(temp, temp);
   1434   __ b(ne, slow);
   1435 
   1436   // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
   1438   // destroy the cp register).
   1439   return ContextOperand(context, var->index());
   1440 }
   1441 
   1442 
   1443 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
   1444                                                   TypeofState typeof_state,
   1445                                                   Label* slow,
   1446                                                   Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is frequently used without actually
  // introducing any new variables; in those cases we do not want to
  // perform a runtime call for every variable in the scope containing
  // the eval.
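  // Illustrative example (hypothetical source, not from this file):
  //   function f() { eval('0'); return x; }
  // Here x is DYNAMIC_GLOBAL: unless the eval introduced a shadowing
  // binding, it can be loaded directly from the global object once the
  // context extensions have been checked.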
   1452   if (var->mode() == DYNAMIC_GLOBAL) {
   1453     EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
   1454     __ jmp(done);
   1455   } else if (var->mode() == DYNAMIC_LOCAL) {
   1456     Variable* local = var->local_if_not_shadowed();
   1457     __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
   1458     if (local->mode() == LET ||
   1459         local->mode() == CONST ||
   1460         local->mode() == CONST_HARMONY) {
   1461       __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
   1462       if (local->mode() == CONST) {
   1463         __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
   1464       } else {  // LET || CONST_HARMONY
   1465         __ b(ne, done);
   1466         __ mov(r0, Operand(var->name()));
   1467         __ push(r0);
   1468         __ CallRuntime(Runtime::kThrowReferenceError, 1);
   1469       }
   1470     }
   1471     __ jmp(done);
   1472   }
   1473 }
   1474 
   1475 
   1476 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
   1477   // Record position before possible IC call.
   1478   SetSourcePosition(proxy->position());
   1479   Variable* var = proxy->var();
   1480 
   1481   // Three cases: global variables, lookup variables, and all other types of
   1482   // variables.
   1483   switch (var->location()) {
   1484     case Variable::UNALLOCATED: {
   1485       Comment cmnt(masm_, "Global variable");
   1486       // Use inline caching. Variable name is passed in r2 and the global
   1487       // object (receiver) in r0.
   1488       __ ldr(r0, GlobalObjectOperand());
   1489       __ mov(r2, Operand(var->name()));
   1490       Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   1491       CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
   1492       context()->Plug(r0);
   1493       break;
   1494     }
   1495 
   1496     case Variable::PARAMETER:
   1497     case Variable::LOCAL:
   1498     case Variable::CONTEXT: {
   1499       Comment cmnt(masm_, var->IsContextSlot()
   1500                               ? "Context variable"
   1501                               : "Stack variable");
   1502       if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code
        // and refers to a potential outside binding. Currently those
        // bindings are always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP
        // always holds.
   1508         ASSERT(var->scope() != NULL);
   1509 
   1510         // Check if the binding really needs an initialization check. The check
   1511         // can be skipped in the following situation: we have a LET or CONST
   1512         // binding in harmony mode, both the Variable and the VariableProxy have
   1513         // the same declaration scope (i.e. they are both in global code, in the
   1514         // same function or in the same eval code) and the VariableProxy is in
   1515         // the source physically located after the initializer of the variable.
   1516         //
   1517         // We cannot skip any initialization checks for CONST in non-harmony
   1518         // mode because const variables may be declared but never initialized:
   1519         //   if (false) { const x; }; var y = x;
   1520         //
   1521         // The condition on the declaration scopes is a conservative check for
   1522         // nested functions that access a binding and are called before the
   1523         // binding is initialized:
   1524         //   function() { f(); let x = 1; function f() { x = 2; } }
   1525         //
   1526         bool skip_init_check;
   1527         if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
   1528           skip_init_check = false;
   1529         } else {
          // Check that we always have a valid source position.
   1531           ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
   1532           ASSERT(proxy->position() != RelocInfo::kNoPosition);
   1533           skip_init_check = var->mode() != CONST &&
   1534               var->initializer_position() < proxy->position();
   1535         }
   1536 
   1537         if (!skip_init_check) {
   1538           // Let and const need a read barrier.
   1539           GetVar(r0, var);
   1540           __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
   1541           if (var->mode() == LET || var->mode() == CONST_HARMONY) {
   1542             // Throw a reference error when using an uninitialized let/const
   1543             // binding in harmony mode.
   1544             Label done;
   1545             __ b(ne, &done);
   1546             __ mov(r0, Operand(var->name()));
   1547             __ push(r0);
   1548             __ CallRuntime(Runtime::kThrowReferenceError, 1);
   1549             __ bind(&done);
   1550           } else {
            // Uninitialized const bindings outside of harmony mode yield undefined.
   1552             ASSERT(var->mode() == CONST);
   1553             __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
   1554           }
   1555           context()->Plug(r0);
   1556           break;
   1557         }
   1558       }
   1559       context()->Plug(var);
   1560       break;
   1561     }
   1562 
   1563     case Variable::LOOKUP: {
   1564       Label done, slow;
   1565       // Generate code for loading from variables potentially shadowed
   1566       // by eval-introduced variables.
   1567       EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
   1568       __ bind(&slow);
   1569       Comment cmnt(masm_, "Lookup variable");
   1570       __ mov(r1, Operand(var->name()));
   1571       __ Push(cp, r1);  // Context and name.
   1572       __ CallRuntime(Runtime::kLoadContextSlot, 2);
   1573       __ bind(&done);
   1574       context()->Plug(r0);
   1575     }
   1576   }
   1577 }
   1578 
   1579 
   1580 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
   1581   Comment cmnt(masm_, "[ RegExpLiteral");
   1582   Label materialized;
   1583   // Registers will be used as follows:
   1584   // r5 = materialized value (RegExp literal)
   1585   // r4 = JS function, literals array
   1586   // r3 = literal index
   1587   // r2 = RegExp pattern
   1588   // r1 = RegExp flags
   1589   // r0 = RegExp literal clone
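  // For a literal such as /abc/gi (an illustrative example), the pattern
  // is the string "abc" and the flags string is "gi".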
   1590   __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1591   __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
   1592   int literal_offset =
   1593       FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
   1594   __ ldr(r5, FieldMemOperand(r4, literal_offset));
   1595   __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
   1596   __ cmp(r5, ip);
   1597   __ b(ne, &materialized);
   1598 
   1599   // Create regexp literal using runtime function.
   1600   // Result will be in r0.
   1601   __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
   1602   __ mov(r2, Operand(expr->pattern()));
   1603   __ mov(r1, Operand(expr->flags()));
   1604   __ Push(r4, r3, r2, r1);
   1605   __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
   1606   __ mov(r5, r0);
   1607 
   1608   __ bind(&materialized);
   1609   int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
   1610   Label allocated, runtime_allocate;
   1611   __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
   1612   __ jmp(&allocated);
   1613 
   1614   __ bind(&runtime_allocate);
   1615   __ push(r5);
   1616   __ mov(r0, Operand(Smi::FromInt(size)));
   1617   __ push(r0);
   1618   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
   1619   __ pop(r5);
   1620 
   1621   __ bind(&allocated);
   1622   // After this, registers are used as follows:
   1623   // r0: Newly allocated regexp.
   1624   // r5: Materialized regexp.
   1625   // r2: temp.
   1626   __ CopyFields(r0, r5, d0, size / kPointerSize);
   1627   context()->Plug(r0);
   1628 }
   1629 
   1630 
   1631 void FullCodeGenerator::EmitAccessor(Expression* expression) {
   1632   if (expression == NULL) {
   1633     __ LoadRoot(r1, Heap::kNullValueRootIndex);
   1634     __ push(r1);
   1635   } else {
   1636     VisitForStackValue(expression);
   1637   }
   1638 }
   1639 
   1640 
   1641 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   1642   Comment cmnt(masm_, "[ ObjectLiteral");
   1643   Handle<FixedArray> constant_properties = expr->constant_properties();
   1644   __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1645   __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
   1646   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
   1647   __ mov(r1, Operand(constant_properties));
   1648   int flags = expr->fast_elements()
   1649       ? ObjectLiteral::kFastElements
   1650       : ObjectLiteral::kNoFlags;
   1651   flags |= expr->has_function()
   1652       ? ObjectLiteral::kHasFunction
   1653       : ObjectLiteral::kNoFlags;
   1654   __ mov(r0, Operand(Smi::FromInt(flags)));
   1655   int properties_count = constant_properties->length() / 2;
   1656   if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
   1657       expr->depth() > 1) {
   1658     __ Push(r3, r2, r1, r0);
   1659     __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
   1660   } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
   1661       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
   1662     __ Push(r3, r2, r1, r0);
   1663     __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
   1664   } else {
   1665     FastCloneShallowObjectStub stub(properties_count);
   1666     __ CallStub(&stub);
   1667   }
   1668 
   1669   // If result_saved is true the result is on top of the stack.  If
   1670   // result_saved is false the result is in r0.
   1671   bool result_saved = false;
   1672 
   1673   // Mark all computed expressions that are bound to a key that
   1674   // is shadowed by a later occurrence of the same key. For the
   1675   // marked expressions, no store code is emitted.
   1676   expr->CalculateEmitStore(zone());
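  // Illustrative example: in { x: f(), x: g() } only the store of g()'s
  // value into x is emitted, but both f() and g() are still evaluated
  // for their side effects.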
   1677 
   1678   AccessorTable accessor_table(zone());
   1679   for (int i = 0; i < expr->properties()->length(); i++) {
   1680     ObjectLiteral::Property* property = expr->properties()->at(i);
   1681     if (property->IsCompileTimeValue()) continue;
   1682 
   1683     Literal* key = property->key();
   1684     Expression* value = property->value();
   1685     if (!result_saved) {
   1686       __ push(r0);  // Save result on stack
   1687       result_saved = true;
   1688     }
   1689     switch (property->kind()) {
   1690       case ObjectLiteral::Property::CONSTANT:
   1691         UNREACHABLE();
   1692       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
   1693         ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
   1694         // Fall through.
   1695       case ObjectLiteral::Property::COMPUTED:
   1696         if (key->value()->IsInternalizedString()) {
   1697           if (property->emit_store()) {
   1698             VisitForAccumulatorValue(value);
   1699             __ mov(r2, Operand(key->value()));
   1700             __ ldr(r1, MemOperand(sp));
   1701             Handle<Code> ic = is_classic_mode()
   1702                 ? isolate()->builtins()->StoreIC_Initialize()
   1703                 : isolate()->builtins()->StoreIC_Initialize_Strict();
   1704             CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
   1705             PrepareForBailoutForId(key->id(), NO_REGISTERS);
   1706           } else {
   1707             VisitForEffect(value);
   1708           }
   1709           break;
   1710         }
   1711         // Duplicate receiver on stack.
   1712         __ ldr(r0, MemOperand(sp));
   1713         __ push(r0);
   1714         VisitForStackValue(key);
   1715         VisitForStackValue(value);
   1716         if (property->emit_store()) {
   1717           __ mov(r0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
   1718           __ push(r0);
   1719           __ CallRuntime(Runtime::kSetProperty, 4);
   1720         } else {
   1721           __ Drop(3);
   1722         }
   1723         break;
   1724       case ObjectLiteral::Property::PROTOTYPE:
   1725         // Duplicate receiver on stack.
   1726         __ ldr(r0, MemOperand(sp));
   1727         __ push(r0);
   1728         VisitForStackValue(value);
   1729         if (property->emit_store()) {
   1730           __ CallRuntime(Runtime::kSetPrototype, 2);
   1731         } else {
   1732           __ Drop(2);
   1733         }
   1734         break;
   1735 
   1736       case ObjectLiteral::Property::GETTER:
   1737         accessor_table.lookup(key)->second->getter = value;
   1738         break;
   1739       case ObjectLiteral::Property::SETTER:
   1740         accessor_table.lookup(key)->second->setter = value;
   1741         break;
   1742     }
   1743   }
   1744 
   1745   // Emit code to define accessors, using only a single call to the runtime for
   1746   // each pair of corresponding getters and setters.
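  // For example, { get x() {...}, set x(v) {...} } results in a single
  // kDefineOrRedefineAccessorProperty call installing both accessors.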
   1747   for (AccessorTable::Iterator it = accessor_table.begin();
   1748        it != accessor_table.end();
   1749        ++it) {
   1750     __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
   1751     __ push(r0);
   1752     VisitForStackValue(it->first);
   1753     EmitAccessor(it->second->getter);
   1754     EmitAccessor(it->second->setter);
   1755     __ mov(r0, Operand(Smi::FromInt(NONE)));
   1756     __ push(r0);
   1757     __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
   1758   }
   1759 
   1760   if (expr->has_function()) {
   1761     ASSERT(result_saved);
   1762     __ ldr(r0, MemOperand(sp));
   1763     __ push(r0);
   1764     __ CallRuntime(Runtime::kToFastProperties, 1);
   1765   }
   1766 
   1767   if (result_saved) {
   1768     context()->PlugTOS();
   1769   } else {
   1770     context()->Plug(r0);
   1771   }
   1772 }
   1773 
   1774 
   1775 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   1776   Comment cmnt(masm_, "[ ArrayLiteral");
   1777 
   1778   ZoneList<Expression*>* subexprs = expr->values();
   1779   int length = subexprs->length();
   1780   Handle<FixedArray> constant_elements = expr->constant_elements();
   1781   ASSERT_EQ(2, constant_elements->length());
   1782   ElementsKind constant_elements_kind =
   1783       static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
   1784   bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
   1785   Handle<FixedArrayBase> constant_elements_values(
   1786       FixedArrayBase::cast(constant_elements->get(1)));
   1787 
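  // Choose a cloning strategy below: copy-on-write fast-element literals
  // can share their backing store, deeply nested literals (depth > 1)
  // need the full runtime path, and oversized or serializer-time
  // literals use the shallow runtime call.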
   1788   __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1789   __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
   1790   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
   1791   __ mov(r1, Operand(constant_elements));
   1792   if (has_fast_elements && constant_elements_values->map() ==
   1793       isolate()->heap()->fixed_cow_array_map()) {
   1794     FastCloneShallowArrayStub stub(
   1795         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
   1796         DONT_TRACK_ALLOCATION_SITE,
   1797         length);
   1798     __ CallStub(&stub);
   1799     __ IncrementCounter(
   1800         isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
   1801   } else if (expr->depth() > 1) {
   1802     __ Push(r3, r2, r1);
   1803     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
   1804   } else if (Serializer::enabled() ||
   1805       length > FastCloneShallowArrayStub::kMaximumClonedLength) {
   1806     __ Push(r3, r2, r1);
   1807     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   1808   } else {
   1809     ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
   1810            FLAG_smi_only_arrays);
   1811     FastCloneShallowArrayStub::Mode mode =
   1812         FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
   1813     AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
   1814         ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
   1815 
   1816     if (has_fast_elements) {
   1817       mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
   1818       allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
   1819     }
   1820 
   1821     FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
   1822     __ CallStub(&stub);
   1823   }
   1824 
   1825   bool result_saved = false;  // Is the result saved to the stack?
   1826 
   1827   // Emit code to evaluate all the non-constant subexpressions and to store
   1828   // them into the newly cloned array.
   1829   for (int i = 0; i < length; i++) {
   1830     Expression* subexpr = subexprs->at(i);
   1831     // If the subexpression is a literal or a simple materialized literal it
   1832     // is already set in the cloned array.
   1833     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
   1834 
   1835     if (!result_saved) {
   1836       __ push(r0);
   1837       __ Push(Smi::FromInt(expr->literal_index()));
   1838       result_saved = true;
   1839     }
   1840     VisitForAccumulatorValue(subexpr);
   1841 
   1842     if (IsFastObjectElementsKind(constant_elements_kind)) {
   1843       int offset = FixedArray::kHeaderSize + (i * kPointerSize);
   1844       __ ldr(r6, MemOperand(sp, kPointerSize));  // Copy of array literal.
   1845       __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
   1846       __ str(result_register(), FieldMemOperand(r1, offset));
   1847       // Update the write barrier for the array store.
   1848       __ RecordWriteField(r1, offset, result_register(), r2,
   1849                           kLRHasBeenSaved, kDontSaveFPRegs,
   1850                           EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
   1851     } else {
   1852       __ mov(r3, Operand(Smi::FromInt(i)));
   1853       StoreArrayLiteralElementStub stub;
   1854       __ CallStub(&stub);
   1855     }
   1856 
   1857     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   1858   }
   1859 
   1860   if (result_saved) {
   1861     __ pop();  // literal index
   1862     context()->PlugTOS();
   1863   } else {
   1864     context()->Plug(r0);
   1865   }
   1866 }
   1867 
   1868 
   1869 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
   1870   Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten by the parser to have a 'throw
  // ReferenceError' on the left-hand side.
   1873   if (!expr->target()->IsValidLeftHandSide()) {
   1874     VisitForEffect(expr->target());
   1875     return;
   1876   }
   1877 
   1878   // Left-hand side can only be a property, a global or a (parameter or local)
   1879   // slot.
   1880   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
   1881   LhsKind assign_type = VARIABLE;
   1882   Property* property = expr->target()->AsProperty();
   1883   if (property != NULL) {
   1884     assign_type = (property->key()->IsPropertyName())
   1885         ? NAMED_PROPERTY
   1886         : KEYED_PROPERTY;
   1887   }
   1888 
   1889   // Evaluate LHS expression.
   1890   switch (assign_type) {
   1891     case VARIABLE:
   1892       // Nothing to do here.
   1893       break;
   1894     case NAMED_PROPERTY:
   1895       if (expr->is_compound()) {
   1896         // We need the receiver both on the stack and in the accumulator.
   1897         VisitForAccumulatorValue(property->obj());
   1898         __ push(result_register());
   1899       } else {
   1900         VisitForStackValue(property->obj());
   1901       }
   1902       break;
   1903     case KEYED_PROPERTY:
   1904       if (expr->is_compound()) {
   1905         VisitForStackValue(property->obj());
   1906         VisitForAccumulatorValue(property->key());
   1907         __ ldr(r1, MemOperand(sp, 0));
   1908         __ push(r0);
   1909       } else {
   1910         VisitForStackValue(property->obj());
   1911         VisitForStackValue(property->key());
   1912       }
   1913       break;
   1914   }
   1915 
   1916   // For compound assignments we need another deoptimization point after the
   1917   // variable/property load.
   1918   if (expr->is_compound()) {
   1919     { AccumulatorValueContext context(this);
   1920       switch (assign_type) {
   1921         case VARIABLE:
   1922           EmitVariableLoad(expr->target()->AsVariableProxy());
   1923           PrepareForBailout(expr->target(), TOS_REG);
   1924           break;
   1925         case NAMED_PROPERTY:
   1926           EmitNamedPropertyLoad(property);
   1927           PrepareForBailoutForId(property->LoadId(), TOS_REG);
   1928           break;
   1929         case KEYED_PROPERTY:
   1930           EmitKeyedPropertyLoad(property);
   1931           PrepareForBailoutForId(property->LoadId(), TOS_REG);
   1932           break;
   1933       }
   1934     }
   1935 
   1936     Token::Value op = expr->binary_op();
   1937     __ push(r0);  // Left operand goes on the stack.
   1938     VisitForAccumulatorValue(expr->value());
   1939 
   1940     OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
   1941         ? OVERWRITE_RIGHT
   1942         : NO_OVERWRITE;
   1943     SetSourcePosition(expr->position() + 1);
   1944     AccumulatorValueContext context(this);
   1945     if (ShouldInlineSmiCase(op)) {
   1946       EmitInlineSmiBinaryOp(expr->binary_operation(),
   1947                             op,
   1948                             mode,
   1949                             expr->target(),
   1950                             expr->value());
   1951     } else {
   1952       EmitBinaryOp(expr->binary_operation(), op, mode);
   1953     }
   1954 
   1955     // Deoptimization point in case the binary operation may have side effects.
   1956     PrepareForBailout(expr->binary_operation(), TOS_REG);
   1957   } else {
   1958     VisitForAccumulatorValue(expr->value());
   1959   }
   1960 
   1961   // Record source position before possible IC call.
   1962   SetSourcePosition(expr->position());
   1963 
   1964   // Store the value.
   1965   switch (assign_type) {
   1966     case VARIABLE:
   1967       EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
   1968                              expr->op());
   1969       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   1970       context()->Plug(r0);
   1971       break;
   1972     case NAMED_PROPERTY:
   1973       EmitNamedPropertyAssignment(expr);
   1974       break;
   1975     case KEYED_PROPERTY:
   1976       EmitKeyedPropertyAssignment(expr);
   1977       break;
   1978   }
   1979 }
   1980 
   1981 
   1982 void FullCodeGenerator::VisitYield(Yield* expr) {
   1983   Comment cmnt(masm_, "[ Yield");
   1984   // Evaluate yielded value first; the initial iterator definition depends on
   1985   // this.  It stays on the stack while we update the iterator.
   1986   VisitForStackValue(expr->expression());
   1987 
   1988   switch (expr->yield_kind()) {
   1989     case Yield::SUSPEND:
   1990       // Pop value from top-of-stack slot; box result into result register.
   1991       EmitCreateIteratorResult(false);
   1992       __ push(result_register());
   1993       // Fall through.
   1994     case Yield::INITIAL: {
   1995       Label suspend, continuation, post_runtime, resume;
   1996 
   1997       __ jmp(&suspend);
   1998 
   1999       __ bind(&continuation);
   2000       __ jmp(&resume);
   2001 
   2002       __ bind(&suspend);
   2003       VisitForAccumulatorValue(expr->generator_object());
   2004       ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
   2005       __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
   2006       __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
   2007       __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
   2008       __ mov(r1, cp);
   2009       __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
   2010                           kLRHasBeenSaved, kDontSaveFPRegs);
   2011       __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
   2012       __ cmp(sp, r1);
   2013       __ b(eq, &post_runtime);
   2014       __ push(r0);  // generator object
   2015       __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
   2016       __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2017       __ bind(&post_runtime);
   2018       __ pop(result_register());
   2019       EmitReturnSequence();
   2020 
   2021       __ bind(&resume);
   2022       context()->Plug(result_register());
   2023       break;
   2024     }
   2025 
   2026     case Yield::FINAL: {
   2027       VisitForAccumulatorValue(expr->generator_object());
   2028       __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
   2029       __ str(r1, FieldMemOperand(result_register(),
   2030                                  JSGeneratorObject::kContinuationOffset));
   2031       // Pop value from top-of-stack slot, box result into result register.
   2032       EmitCreateIteratorResult(true);
   2033       EmitUnwindBeforeReturn();
   2034       EmitReturnSequence();
   2035       break;
   2036     }
   2037 
   2038     case Yield::DELEGATING: {
   2039       VisitForStackValue(expr->generator_object());
   2040 
   2041       // Initial stack layout is as follows:
   2042       // [sp + 1 * kPointerSize] iter
   2043       // [sp + 0 * kPointerSize] g
   2044 
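      // The delegating yield is lowered roughly like this (a sketch of
      // the intent, not the exact evaluation order; f starts as 'next'
      // and switches to 'throw' when an exception is thrown in):
      //
      //   var received = undefined;
      //   for (;;) {
      //     var result = iter[f](received);
      //     if (result.done) break;
      //     received = yield result;  // re-yielded without re-boxing
      //   }
      //   result.value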
   2045       Label l_catch, l_try, l_suspend, l_continuation, l_resume;
   2046       Label l_next, l_call, l_loop;
   2047       // Initial send value is undefined.
   2048       __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   2049       __ b(&l_next);
   2050 
   2051       // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
   2052       __ bind(&l_catch);
   2053       handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
   2054       __ LoadRoot(r2, Heap::kthrow_stringRootIndex);     // "throw"
   2055       __ ldr(r3, MemOperand(sp, 1 * kPointerSize));      // iter
   2056       __ push(r3);                                       // iter
   2057       __ push(r0);                                       // exception
   2058       __ jmp(&l_call);
   2059 
   2060       // try { received = %yield result }
   2061       // Shuffle the received result above a try handler and yield it without
   2062       // re-boxing.
   2063       __ bind(&l_try);
   2064       __ pop(r0);                                        // result
   2065       __ PushTryHandler(StackHandler::CATCH, expr->index());
   2066       const int handler_size = StackHandlerConstants::kSize;
   2067       __ push(r0);                                       // result
   2068       __ jmp(&l_suspend);
   2069       __ bind(&l_continuation);
   2070       __ jmp(&l_resume);
   2071       __ bind(&l_suspend);
   2072       const int generator_object_depth = kPointerSize + handler_size;
   2073       __ ldr(r0, MemOperand(sp, generator_object_depth));
   2074       __ push(r0);                                       // g
   2075       ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
   2076       __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
   2077       __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
   2078       __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
   2079       __ mov(r1, cp);
   2080       __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
   2081                           kLRHasBeenSaved, kDontSaveFPRegs);
   2082       __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
   2083       __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2084       __ pop(r0);                                        // result
   2085       EmitReturnSequence();
   2086       __ bind(&l_resume);                                // received in r0
   2087       __ PopTryHandler();
   2088 
   2089       // receiver = iter; f = 'next'; arg = received;
   2090       __ bind(&l_next);
   2091       __ LoadRoot(r2, Heap::knext_stringRootIndex);      // "next"
   2092       __ ldr(r3, MemOperand(sp, 1 * kPointerSize));      // iter
   2093       __ push(r3);                                       // iter
   2094       __ push(r0);                                       // received
   2095 
   2096       // result = receiver[f](arg);
   2097       __ bind(&l_call);
   2098       Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(1);
   2099       CallIC(ic);
   2100       __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2101 
   2102       // if (!result.done) goto l_try;
   2103       __ bind(&l_loop);
   2104       __ push(r0);                                       // save result
   2105       __ LoadRoot(r2, Heap::kdone_stringRootIndex);      // "done"
   2106       Handle<Code> done_ic = isolate()->builtins()->LoadIC_Initialize();
   2107       CallIC(done_ic);                                   // result.done in r0
   2108       Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
   2109       CallIC(bool_ic);
   2110       __ cmp(r0, Operand(0));
   2111       __ b(eq, &l_try);
   2112 
   2113       // result.value
   2114       __ pop(r0);                                        // result
   2115       __ LoadRoot(r2, Heap::kvalue_stringRootIndex);     // "value"
   2116       Handle<Code> value_ic = isolate()->builtins()->LoadIC_Initialize();
   2117       CallIC(value_ic);                                  // result.value in r0
   2118       context()->DropAndPlug(2, r0);                     // drop iter and g
   2119       break;
   2120     }
   2121   }
   2122 }
   2123 
   2124 
   2125 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
   2126     Expression *value,
   2127     JSGeneratorObject::ResumeMode resume_mode) {
   2128   // The value stays in r0, and is ultimately read by the resumed generator, as
   2129   // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it.  r1
   2130   // will hold the generator object until the activation has been resumed.
   2131   VisitForStackValue(generator);
   2132   VisitForAccumulatorValue(value);
   2133   __ pop(r1);
   2134 
   2135   // Check generator state.
   2136   Label wrong_state, done;
   2137   __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
   2138   STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
   2139   STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
   2140   __ cmp(r3, Operand(Smi::FromInt(0)));
   2141   __ b(le, &wrong_state);
   2142 
   2143   // Load suspended function and context.
   2144   __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
   2145   __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
   2146 
   2147   // Load receiver and store as the first argument.
   2148   __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
   2149   __ push(r2);
   2150 
   2151   // Push holes for the rest of the arguments to the generator function.
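  // (Presumably safe because any parameter the generator body actually
  // uses is context-allocated; the parameter stack slots themselves can
  // therefore be refilled with holes.)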
   2152   __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
   2153   __ ldr(r3,
   2154          FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
   2155   __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
   2156   Label push_argument_holes, push_frame;
   2157   __ bind(&push_argument_holes);
   2158   __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
   2159   __ b(mi, &push_frame);
   2160   __ push(r2);
   2161   __ jmp(&push_argument_holes);
   2162 
   2163   // Enter a new JavaScript frame, and initialize its slots as they were when
   2164   // the generator was suspended.
   2165   Label resume_frame;
   2166   __ bind(&push_frame);
   2167   __ bl(&resume_frame);
   2168   __ jmp(&done);
   2169   __ bind(&resume_frame);
   2170   __ push(lr);  // Return address.
   2171   __ push(fp);  // Caller's frame pointer.
   2172   __ mov(fp, sp);
   2173   __ push(cp);  // Callee's context.
   2174   __ push(r4);  // Callee's JS Function.
   2175 
   2176   // Load the operand stack size.
   2177   __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
   2178   __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
   2179   __ SmiUntag(r3);
   2180 
   2181   // If we are sending a value and there is no operand stack, we can jump back
   2182   // in directly.
   2183   if (resume_mode == JSGeneratorObject::NEXT) {
   2184     Label slow_resume;
   2185     __ cmp(r3, Operand(0));
   2186     __ b(ne, &slow_resume);
   2187     __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
   2188     __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
   2189     __ SmiUntag(r2);
   2190     __ add(r3, r3, r2);
   2191     __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
   2192     __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
   2193     __ Jump(r3);
   2194     __ bind(&slow_resume);
   2195   }
   2196 
   2197   // Otherwise, we push holes for the operand stack and call the runtime to fix
   2198   // up the stack and the handlers.
   2199   Label push_operand_holes, call_resume;
   2200   __ bind(&push_operand_holes);
   2201   __ sub(r3, r3, Operand(1), SetCC);
   2202   __ b(mi, &call_resume);
   2203   __ push(r2);
   2204   __ b(&push_operand_holes);
   2205   __ bind(&call_resume);
   2206   __ push(r1);
   2207   __ push(result_register());
   2208   __ Push(Smi::FromInt(resume_mode));
   2209   __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
   2210   // Not reached: the runtime call returns elsewhere.
   2211   __ stop("not-reached");
   2212 
   2213   // Throw error if we attempt to operate on a running generator.
   2214   __ bind(&wrong_state);
   2215   __ push(r1);
   2216   __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
   2217 
   2218   __ bind(&done);
   2219   context()->Plug(result_register());
   2220 }
   2221 
   2222 
   2223 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
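  // Allocates and fills in an iterator result object, i.e. roughly
  //   { value: <popped from the stack>, done: <done> },
  // using the generator_result_map from the native context.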
   2224   Label gc_required;
   2225   Label allocated;
   2226 
   2227   Handle<Map> map(isolate()->native_context()->generator_result_map());
   2228 
   2229   __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT);
   2230   __ jmp(&allocated);
   2231 
   2232   __ bind(&gc_required);
   2233   __ Push(Smi::FromInt(map->instance_size()));
   2234   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
   2235   __ ldr(context_register(),
   2236          MemOperand(fp, StandardFrameConstants::kContextOffset));
   2237 
   2238   __ bind(&allocated);
   2239   __ mov(r1, Operand(map));
   2240   __ pop(r2);
   2241   __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
   2242   __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
   2243   ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
   2244   __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
   2245   __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
   2246   __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
   2247   __ str(r2,
   2248          FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
   2249   __ str(r3,
   2250          FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
   2251 
   2252   // Only the value field needs a write barrier, as the other values are in the
   2253   // root set.
   2254   __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
   2255                       r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
   2256 }
   2257 
   2258 
   2259 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   2260   SetSourcePosition(prop->position());
   2261   Literal* key = prop->key()->AsLiteral();
   2262   __ mov(r2, Operand(key->value()));
  // Call the load IC. It expects the receiver in r0 and the name in r2.
   2264   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   2265   CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
   2266 }
   2267 
   2268 
   2269 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   2270   SetSourcePosition(prop->position());
  // Call the keyed load IC. It expects the key in r0 and the receiver in r1.
   2272   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
   2273   CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
   2274 }
   2275 
   2276 
   2277 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
   2278                                               Token::Value op,
   2279                                               OverwriteMode mode,
   2280                                               Expression* left_expr,
   2281                                               Expression* right_expr) {
   2282   Label done, smi_case, stub_call;
   2283 
   2284   Register scratch1 = r2;
   2285   Register scratch2 = r3;
   2286 
   2287   // Get the arguments.
   2288   Register left = r1;
   2289   Register right = r0;
   2290   __ pop(left);
   2291 
   2292   // Perform combined smi check on both operands.
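  // Smis are tagged with a 0 in the least significant bit, so OR-ing the
  // two operands and testing that bit checks both of them at once.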
   2293   __ orr(scratch1, left, Operand(right));
   2294   STATIC_ASSERT(kSmiTag == 0);
   2295   JumpPatchSite patch_site(masm_);
   2296   patch_site.EmitJumpIfSmi(scratch1, &smi_case);
   2297 
   2298   __ bind(&stub_call);
   2299   BinaryOpStub stub(op, mode);
   2300   CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
   2301          expr->BinaryOperationFeedbackId());
   2302   patch_site.EmitPatchInfo();
   2303   __ jmp(&done);
   2304 
   2305   __ bind(&smi_case);
   2306   // Smi case. This code works the same way as the smi-smi case in the type
   2307   // recording binary operation stub, see
   2308   // BinaryOpStub::GenerateSmiSmiOperation for comments.
   2309   switch (op) {
   2310     case Token::SAR:
   2311       __ GetLeastBitsFromSmi(scratch1, right, 5);
   2312       __ mov(right, Operand(left, ASR, scratch1));
   2313       __ bic(right, right, Operand(kSmiTagMask));
   2314       break;
   2315     case Token::SHL: {
   2316       __ SmiUntag(scratch1, left);
   2317       __ GetLeastBitsFromSmi(scratch2, right, 5);
   2318       __ mov(scratch1, Operand(scratch1, LSL, scratch2));
   2319       __ TrySmiTag(right, scratch1, &stub_call);
   2320       break;
   2321     }
   2322     case Token::SHR: {
   2323       __ SmiUntag(scratch1, left);
   2324       __ GetLeastBitsFromSmi(scratch2, right, 5);
   2325       __ mov(scratch1, Operand(scratch1, LSR, scratch2));
   2326       __ tst(scratch1, Operand(0xc0000000));
   2327       __ b(ne, &stub_call);
   2328       __ SmiTag(right, scratch1);
   2329       break;
   2330     }
   2331     case Token::ADD:
   2332       __ add(scratch1, left, Operand(right), SetCC);
   2333       __ b(vs, &stub_call);
   2334       __ mov(right, scratch1);
   2335       break;
   2336     case Token::SUB:
   2337       __ sub(scratch1, left, Operand(right), SetCC);
   2338       __ b(vs, &stub_call);
   2339       __ mov(right, scratch1);
   2340       break;
   2341     case Token::MUL: {
   2342       __ SmiUntag(ip, right);
   2343       __ smull(scratch1, scratch2, left, ip);
   2344       __ mov(ip, Operand(scratch1, ASR, 31));
   2345       __ cmp(ip, Operand(scratch2));
   2346       __ b(ne, &stub_call);
   2347       __ cmp(scratch1, Operand::Zero());
   2348       __ mov(right, Operand(scratch1), LeaveCC, ne);
   2349       __ b(ne, &done);
   2350       __ add(scratch2, right, Operand(left), SetCC);
   2351       __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
   2352       __ b(mi, &stub_call);
   2353       break;
   2354     }
   2355     case Token::BIT_OR:
   2356       __ orr(right, left, Operand(right));
   2357       break;
   2358     case Token::BIT_AND:
   2359       __ and_(right, left, Operand(right));
   2360       break;
   2361     case Token::BIT_XOR:
   2362       __ eor(right, left, Operand(right));
   2363       break;
   2364     default:
   2365       UNREACHABLE();
   2366   }
   2367 
   2368   __ bind(&done);
   2369   context()->Plug(r0);
   2370 }
   2371 
   2372 
   2373 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
   2374                                      Token::Value op,
   2375                                      OverwriteMode mode) {
   2376   __ pop(r1);
   2377   BinaryOpStub stub(op, mode);
   2378   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
   2379   CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
   2380          expr->BinaryOperationFeedbackId());
   2381   patch_site.EmitPatchInfo();
   2382   context()->Plug(r0);
   2383 }
   2384 
   2385 
   2386 void FullCodeGenerator::EmitAssignment(Expression* expr) {
   2387   // Invalid left-hand sides are rewritten by the parser to have a 'throw
   2388   // ReferenceError' on the left-hand side.
   2389   if (!expr->IsValidLeftHandSide()) {
   2390     VisitForEffect(expr);
   2391     return;
   2392   }
   2393 
   2394   // Left-hand side can only be a property, a global or a (parameter or local)
   2395   // slot.
   2396   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
   2397   LhsKind assign_type = VARIABLE;
   2398   Property* prop = expr->AsProperty();
   2399   if (prop != NULL) {
   2400     assign_type = (prop->key()->IsPropertyName())
   2401         ? NAMED_PROPERTY
   2402         : KEYED_PROPERTY;
   2403   }
   2404 
   2405   switch (assign_type) {
   2406     case VARIABLE: {
   2407       Variable* var = expr->AsVariableProxy()->var();
   2408       EffectContext context(this);
   2409       EmitVariableAssignment(var, Token::ASSIGN);
   2410       break;
   2411     }
   2412     case NAMED_PROPERTY: {
   2413       __ push(r0);  // Preserve value.
   2414       VisitForAccumulatorValue(prop->obj());
   2415       __ mov(r1, r0);
   2416       __ pop(r0);  // Restore value.
   2417       __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
   2418       Handle<Code> ic = is_classic_mode()
   2419           ? isolate()->builtins()->StoreIC_Initialize()
   2420           : isolate()->builtins()->StoreIC_Initialize_Strict();
   2421       CallIC(ic);
   2422       break;
   2423     }
   2424     case KEYED_PROPERTY: {
   2425       __ push(r0);  // Preserve value.
   2426       VisitForStackValue(prop->obj());
   2427       VisitForAccumulatorValue(prop->key());
   2428       __ mov(r1, r0);
   2429       __ pop(r2);
   2430       __ pop(r0);  // Restore value.
   2431       Handle<Code> ic = is_classic_mode()
   2432           ? isolate()->builtins()->KeyedStoreIC_Initialize()
   2433           : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
   2434       CallIC(ic);
   2435       break;
   2436     }
   2437   }
   2438   context()->Plug(r0);
   2439 }
   2440 
   2441 
   2442 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
   2443                                                Token::Value op) {
   2444   if (var->IsUnallocated()) {
   2445     // Global var, const, or let.
   2446     __ mov(r2, Operand(var->name()));
   2447     __ ldr(r1, GlobalObjectOperand());
   2448     Handle<Code> ic = is_classic_mode()
   2449         ? isolate()->builtins()->StoreIC_Initialize()
   2450         : isolate()->builtins()->StoreIC_Initialize_Strict();
   2451     CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
   2452 
   2453   } else if (op == Token::INIT_CONST) {
   2454     // Const initializers need a write barrier.
   2455     ASSERT(!var->IsParameter());  // No const parameters.
   2456     if (var->IsStackLocal()) {
   2457       Label skip;
   2458       __ ldr(r1, StackOperand(var));
   2459       __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
   2460       __ b(ne, &skip);
   2461       __ str(result_register(), StackOperand(var));
   2462       __ bind(&skip);
   2463     } else {
   2464       ASSERT(var->IsContextSlot() || var->IsLookupSlot());
   2465       // Like var declarations, const declarations are hoisted to function
   2466       // scope.  However, unlike var initializers, const initializers are
   2467       // able to drill a hole to that function context, even from inside a
   2468       // 'with' context.  We thus bypass the normal static scope lookup for
   2469       // var->IsContextSlot().
   2470       __ push(r0);
   2471       __ mov(r0, Operand(var->name()));
   2472       __ Push(cp, r0);  // Context and name.
   2473       __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
   2474     }
   2475 
   2476   } else if (var->mode() == LET && op != Token::INIT_LET) {
   2477     // Non-initializing assignment to let variable needs a write barrier.
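    // Assigning to a let binding before its declaration has executed,
    // e.g. (illustrative)
    //   { x = 1; let x; }
    // must throw a ReferenceError, hence the hole check below.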
   2478     if (var->IsLookupSlot()) {
   2479       __ push(r0);  // Value.
   2480       __ mov(r1, Operand(var->name()));
   2481       __ mov(r0, Operand(Smi::FromInt(language_mode())));
   2482       __ Push(cp, r1, r0);  // Context, name, strict mode.
   2483       __ CallRuntime(Runtime::kStoreContextSlot, 4);
   2484     } else {
   2485       ASSERT(var->IsStackAllocated() || var->IsContextSlot());
   2486       Label assign;
   2487       MemOperand location = VarOperand(var, r1);
   2488       __ ldr(r3, location);
   2489       __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
   2490       __ b(ne, &assign);
   2491       __ mov(r3, Operand(var->name()));
   2492       __ push(r3);
   2493       __ CallRuntime(Runtime::kThrowReferenceError, 1);
   2494       // Perform the assignment.
   2495       __ bind(&assign);
   2496       __ str(result_register(), location);
   2497       if (var->IsContextSlot()) {
   2498         // RecordWrite may destroy all its register arguments.
   2499         __ mov(r3, result_register());
   2500         int offset = Context::SlotOffset(var->index());
   2501         __ RecordWriteContextSlot(
   2502             r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
   2503       }
   2504     }
   2505 
   2506   } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
   2507     // Assignment to var or initializing assignment to let/const
   2508     // in harmony mode.
   2509     if (var->IsStackAllocated() || var->IsContextSlot()) {
   2510       MemOperand location = VarOperand(var, r1);
   2511       if (generate_debug_code_ && op == Token::INIT_LET) {
   2512         // Check for an uninitialized let binding.
   2513         __ ldr(r2, location);
   2514         __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
   2515         __ Check(eq, kLetBindingReInitialization);
   2516       }
   2517       // Perform the assignment.
   2518       __ str(r0, location);
   2519       if (var->IsContextSlot()) {
   2520         __ mov(r3, r0);
   2521         int offset = Context::SlotOffset(var->index());
   2522         __ RecordWriteContextSlot(
   2523             r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
   2524       }
   2525     } else {
   2526       ASSERT(var->IsLookupSlot());
   2527       __ push(r0);  // Value.
   2528       __ mov(r1, Operand(var->name()));
   2529       __ mov(r0, Operand(Smi::FromInt(language_mode())));
   2530       __ Push(cp, r1, r0);  // Context, name, strict mode.
   2531       __ CallRuntime(Runtime::kStoreContextSlot, 4);
   2532     }
   2533   }
   2534   // Non-initializing assignments to consts are ignored.
   2535 }
   2536 
   2537 
   2538 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
   2539   // Assignment to a property, using a named store IC.
   2540   Property* prop = expr->target()->AsProperty();
   2541   ASSERT(prop != NULL);
   2542   ASSERT(prop->key()->AsLiteral() != NULL);
   2543 
   2544   // Record source code position before IC call.
   2545   SetSourcePosition(expr->position());
   2546   __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
   2547   __ pop(r1);
   2548 
   2549   Handle<Code> ic = is_classic_mode()
   2550       ? isolate()->builtins()->StoreIC_Initialize()
   2551       : isolate()->builtins()->StoreIC_Initialize_Strict();
   2552   CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
   2553 
   2554   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   2555   context()->Plug(r0);
   2556 }
   2557 
   2558 
   2559 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   2560   // Assignment to a property, using a keyed store IC.
   2561 
   2562   // Record source code position before IC call.
   2563   SetSourcePosition(expr->position());
   2564   __ pop(r1);  // Key.
   2565   __ pop(r2);
   2566 
   2567   Handle<Code> ic = is_classic_mode()
   2568       ? isolate()->builtins()->KeyedStoreIC_Initialize()
   2569       : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
   2570   CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
   2571 
   2572   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   2573   context()->Plug(r0);
   2574 }
   2575 
   2576 
   2577 void FullCodeGenerator::VisitProperty(Property* expr) {
   2578   Comment cmnt(masm_, "[ Property");
   2579   Expression* key = expr->key();
   2580 
   2581   if (key->IsPropertyName()) {
   2582     VisitForAccumulatorValue(expr->obj());
   2583     EmitNamedPropertyLoad(expr);
   2584     PrepareForBailoutForId(expr->LoadId(), TOS_REG);
   2585     context()->Plug(r0);
   2586   } else {
   2587     VisitForStackValue(expr->obj());
   2588     VisitForAccumulatorValue(expr->key());
   2589     __ pop(r1);
   2590     EmitKeyedPropertyLoad(expr);
   2591     context()->Plug(r0);
   2592   }
   2593 }
   2594 
   2595 
   2596 void FullCodeGenerator::CallIC(Handle<Code> code,
   2597                                RelocInfo::Mode rmode,
   2598                                TypeFeedbackId ast_id) {
   2599   ic_total_count_++;
   2600   // All calls must have a predictable size in full-codegen code to ensure that
   2601   // the debugger can patch them correctly.
   2602   __ Call(code, rmode, ast_id, al, NEVER_INLINE_TARGET_ADDRESS);
   2603 }
   2604 
   2605 void FullCodeGenerator::EmitCallWithIC(Call* expr,
   2606                                        Handle<Object> name,
   2607                                        RelocInfo::Mode mode) {
   2608   // Code common for calls using the IC.
   2609   ZoneList<Expression*>* args = expr->arguments();
   2610   int arg_count = args->length();
   2611   { PreservePositionScope scope(masm()->positions_recorder());
   2612     for (int i = 0; i < arg_count; i++) {
   2613       VisitForStackValue(args->at(i));
   2614     }
   2615     __ mov(r2, Operand(name));
   2616   }
   2617   // Record source position for debugger.
   2618   SetSourcePosition(expr->position());
   2619   // Call the IC initialization code.
   2620   Handle<Code> ic =
   2621       isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
   2622   CallIC(ic, mode, expr->CallFeedbackId());
   2623   RecordJSReturnSite(expr);
   2624   // Restore context register.
   2625   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2626   context()->Plug(r0);
   2627 }
   2628 
   2629 
   2630 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
   2631                                             Expression* key) {
   2632   // Load the key.
   2633   VisitForAccumulatorValue(key);
   2634 
   2635   // Swap the name of the function and the receiver on the stack to follow
   2636   // the calling convention for call ICs.
   2637   __ pop(r1);
   2638   __ push(r0);
   2639   __ push(r1);
   2640 
   2641   // Code common for calls using the IC.
   2642   ZoneList<Expression*>* args = expr->arguments();
   2643   int arg_count = args->length();
   2644   { PreservePositionScope scope(masm()->positions_recorder());
   2645     for (int i = 0; i < arg_count; i++) {
   2646       VisitForStackValue(args->at(i));
   2647     }
   2648   }
   2649   // Record source position for debugger.
   2650   SetSourcePosition(expr->position());
   2651   // Call the IC initialization code.
   2652   Handle<Code> ic =
   2653       isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
   2654   __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize));  // Key.
   2655   CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
   2656   RecordJSReturnSite(expr);
   2657   // Restore context register.
   2658   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2659   context()->DropAndPlug(1, r0);  // Drop the key still on the stack.
   2660 }
   2661 
   2662 
   2663 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
   2664   // Code common for calls using the call stub.
   2665   ZoneList<Expression*>* args = expr->arguments();
   2666   int arg_count = args->length();
   2667   { PreservePositionScope scope(masm()->positions_recorder());
   2668     for (int i = 0; i < arg_count; i++) {
   2669       VisitForStackValue(args->at(i));
   2670     }
   2671   }
   2672   // Record source position for debugger.
   2673   SetSourcePosition(expr->position());
   2674 
   2675   // Record call targets in unoptimized code.
   2676   flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
   2677   Handle<Object> uninitialized =
   2678       TypeFeedbackCells::UninitializedSentinel(isolate());
   2679   Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
   2680   RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
   2681   __ mov(r2, Operand(cell));
   2682 
   2683   CallFunctionStub stub(arg_count, flags);
   2684   __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
   2685   __ CallStub(&stub, expr->CallFeedbackId());
   2686   RecordJSReturnSite(expr);
   2687   // Restore context register.
   2688   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2689   context()->DropAndPlug(1, r0);
   2690 }
   2691 
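// An illustrative sketch (not part of this file's emitted code) of what
// RECORD_CALL_TARGET buys us: each call site owns a feedback cell that
// starts out holding the uninitialized sentinel, and the call stub updates
// it as targets are observed. All names below are hypothetical.
struct CallFeedbackCellSketch {
  const void* value;  // Sentinel, the sole observed target, or megamorphic.
};

static void RecordCallTargetSketch(CallFeedbackCellSketch* cell,
                                   const void* target,
                                   const void* uninitialized_sentinel,
                                   const void* megamorphic_sentinel) {
  if (cell->value == uninitialized_sentinel) {
    cell->value = target;                // First call: remember the target.
  } else if (cell->value != target) {
    cell->value = megamorphic_sentinel;  // Conflicting targets observed.
  }
  // A cell left monomorphic lets the optimizing compiler specialize the
  // call site against that single target.
}
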
   2692 
   2693 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
   2694   // Push copy of the first argument or undefined if it doesn't exist.
   2695   if (arg_count > 0) {
   2696     __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
   2697   } else {
   2698     __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
   2699   }
   2700   __ push(r1);
   2701 
   2702   // Push the receiver of the enclosing function.
   2703   int receiver_offset = 2 + info_->scope()->num_parameters();
   2704   __ ldr(r1, MemOperand(fp, receiver_offset * kPointerSize));
   2705   __ push(r1);
   2706   // Push the language mode.
   2707   __ mov(r1, Operand(Smi::FromInt(language_mode())));
   2708   __ push(r1);
   2709 
  // Push the start position of the scope the call resides in.
   2711   __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
   2712   __ push(r1);
   2713 
   2714   // Do the runtime call.
   2715   __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
   2716 }
   2717 
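// An illustrative sketch (not part of this file's emitted code) of the
// five values Runtime::kResolvePossiblyDirectEval pops, from deepest to
// shallowest. The helper name and slot descriptions are hypothetical;
// slot 0 is pushed by VisitCall before this function runs.
static int ResolveEvalStackSketch(int arg_count, const char* slots[5]) {
  slots[0] = "copy of the function (pushed by the caller, VisitCall)";
  slots[1] = (arg_count > 0) ? "first argument (the eval source)"
                             : "undefined";
  slots[2] = "receiver of the enclosing function";
  slots[3] = "language mode (as a smi)";
  slots[4] = "start position of the enclosing scope (as a smi)";
  return 5;  // Matches the argument count passed to CallRuntime above.
}
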
   2718 
   2719 void FullCodeGenerator::VisitCall(Call* expr) {
   2720 #ifdef DEBUG
   2721   // We want to verify that RecordJSReturnSite gets called on all paths
   2722   // through this function.  Avoid early returns.
   2723   expr->return_is_recorded_ = false;
   2724 #endif
   2725 
   2726   Comment cmnt(masm_, "[ Call");
   2727   Expression* callee = expr->expression();
   2728   VariableProxy* proxy = callee->AsVariableProxy();
   2729   Property* property = callee->AsProperty();
   2730 
   2731   if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
   2732     // In a call to eval, we first call %ResolvePossiblyDirectEval to
   2733     // resolve the function we need to call and the receiver of the
   2734     // call.  Then we call the resolved function using the given
   2735     // arguments.
   2736     ZoneList<Expression*>* args = expr->arguments();
   2737     int arg_count = args->length();
   2738 
   2739     { PreservePositionScope pos_scope(masm()->positions_recorder());
   2740       VisitForStackValue(callee);
   2741       __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
   2742       __ push(r2);  // Reserved receiver slot.
   2743 
   2744       // Push the arguments.
   2745       for (int i = 0; i < arg_count; i++) {
   2746         VisitForStackValue(args->at(i));
   2747       }
   2748 
   2749       // Push a copy of the function (found below the arguments) and
   2750       // resolve eval.
   2751       __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
   2752       __ push(r1);
   2753       EmitResolvePossiblyDirectEval(arg_count);
   2754 
   2755       // The runtime call returns a pair of values in r0 (function) and
   2756       // r1 (receiver). Touch up the stack with the right values.
   2757       __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
   2758       __ str(r1, MemOperand(sp, arg_count * kPointerSize));
   2759     }
   2760 
   2761     // Record source position for debugger.
   2762     SetSourcePosition(expr->position());
   2763     CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
   2764     __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
   2765     __ CallStub(&stub);
   2766     RecordJSReturnSite(expr);
   2767     // Restore context register.
   2768     __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2769     context()->DropAndPlug(1, r0);
   2770   } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
   2771     // Push global object as receiver for the call IC.
   2772     __ ldr(r0, GlobalObjectOperand());
   2773     __ push(r0);
   2774     EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
   2775   } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
   2776     // Call to a lookup slot (dynamically introduced variable).
   2777     Label slow, done;
   2778 
   2779     { PreservePositionScope scope(masm()->positions_recorder());
   2780       // Generate code for loading from variables potentially shadowed
   2781       // by eval-introduced variables.
   2782       EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
   2783     }
   2784 
   2785     __ bind(&slow);
   2786     // Call the runtime to find the function to call (returned in r0)
    // and the object holding it (returned in r1).
   2788     __ push(context_register());
   2789     __ mov(r2, Operand(proxy->name()));
   2790     __ push(r2);
   2791     __ CallRuntime(Runtime::kLoadContextSlot, 2);
   2792     __ Push(r0, r1);  // Function, receiver.
   2793 
   2794     // If fast case code has been generated, emit code to push the
   2795     // function and receiver and have the slow path jump around this
   2796     // code.
   2797     if (done.is_linked()) {
   2798       Label call;
   2799       __ b(&call);
   2800       __ bind(&done);
   2801       // Push function.
   2802       __ push(r0);
   2803       // The receiver is implicitly the global receiver. Indicate this
   2804       // by passing the hole to the call function stub.
   2805       __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
   2806       __ push(r1);
   2807       __ bind(&call);
   2808     }
   2809 
   2810     // The receiver is either the global receiver or an object found
   2811     // by LoadContextSlot. That object could be the hole if the
   2812     // receiver is implicitly the global object.
   2813     EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
   2814   } else if (property != NULL) {
   2815     { PreservePositionScope scope(masm()->positions_recorder());
   2816       VisitForStackValue(property->obj());
   2817     }
   2818     if (property->key()->IsPropertyName()) {
   2819       EmitCallWithIC(expr,
   2820                      property->key()->AsLiteral()->value(),
   2821                      RelocInfo::CODE_TARGET);
   2822     } else {
   2823       EmitKeyedCallWithIC(expr, property->key());
   2824     }
   2825   } else {
   2826     // Call to an arbitrary expression not handled specially above.
   2827     { PreservePositionScope scope(masm()->positions_recorder());
   2828       VisitForStackValue(callee);
   2829     }
   2830     // Load global receiver object.
   2831     __ ldr(r1, GlobalObjectOperand());
   2832     __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
   2833     __ push(r1);
   2834     // Emit function call.
   2835     EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
   2836   }
   2837 
   2838 #ifdef DEBUG
   2839   // RecordJSReturnSite should have been called.
   2840   ASSERT(expr->return_is_recorded_);
   2841 #endif
   2842 }
   2843 
   2844 
   2845 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   2846   Comment cmnt(masm_, "[ CallNew");
   2847   // According to ECMA-262, section 11.2.2, page 44, the function
   2848   // expression in new calls must be evaluated before the
   2849   // arguments.
   2850 
  // Push the constructor on the stack.  If it's not a function, it's used
  // as the receiver for CALL_NON_FUNCTION; otherwise the value on the
  // stack is ignored.
   2854   VisitForStackValue(expr->expression());
   2855 
   2856   // Push the arguments ("left-to-right") on the stack.
   2857   ZoneList<Expression*>* args = expr->arguments();
   2858   int arg_count = args->length();
   2859   for (int i = 0; i < arg_count; i++) {
   2860     VisitForStackValue(args->at(i));
   2861   }
   2862 
   2863   // Call the construct call builtin that handles allocation and
   2864   // constructor invocation.
   2865   SetSourcePosition(expr->position());
   2866 
   2867   // Load function and argument count into r1 and r0.
   2868   __ mov(r0, Operand(arg_count));
   2869   __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
   2870 
   2871   // Record call targets in unoptimized code.
   2872   Handle<Object> uninitialized =
   2873       TypeFeedbackCells::UninitializedSentinel(isolate());
   2874   Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
   2875   RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
   2876   __ mov(r2, Operand(cell));
   2877 
   2878   CallConstructStub stub(RECORD_CALL_TARGET);
   2879   __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
   2880   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   2881   context()->Plug(r0);
   2882 }
   2883 
   2884 
   2885 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
   2886   ZoneList<Expression*>* args = expr->arguments();
   2887   ASSERT(args->length() == 1);
   2888 
   2889   VisitForAccumulatorValue(args->at(0));
   2890 
   2891   Label materialize_true, materialize_false;
   2892   Label* if_true = NULL;
   2893   Label* if_false = NULL;
   2894   Label* fall_through = NULL;
   2895   context()->PrepareTest(&materialize_true, &materialize_false,
   2896                          &if_true, &if_false, &fall_through);
   2897 
   2898   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2899   __ SmiTst(r0);
   2900   Split(eq, if_true, if_false, fall_through);
   2901 
   2902   context()->Plug(if_true, if_false);
   2903 }
   2904 
   2905 
   2906 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
   2907   ZoneList<Expression*>* args = expr->arguments();
   2908   ASSERT(args->length() == 1);
   2909 
   2910   VisitForAccumulatorValue(args->at(0));
   2911 
   2912   Label materialize_true, materialize_false;
   2913   Label* if_true = NULL;
   2914   Label* if_false = NULL;
   2915   Label* fall_through = NULL;
   2916   context()->PrepareTest(&materialize_true, &materialize_false,
   2917                          &if_true, &if_false, &fall_through);
   2918 
   2919   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2920   __ NonNegativeSmiTst(r0);
   2921   Split(eq, if_true, if_false, fall_through);
   2922 
   2923   context()->Plug(if_true, if_false);
   2924 }
   2925 
   2926 
   2927 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
   2928   ZoneList<Expression*>* args = expr->arguments();
   2929   ASSERT(args->length() == 1);
   2930 
   2931   VisitForAccumulatorValue(args->at(0));
   2932 
   2933   Label materialize_true, materialize_false;
   2934   Label* if_true = NULL;
   2935   Label* if_false = NULL;
   2936   Label* fall_through = NULL;
   2937   context()->PrepareTest(&materialize_true, &materialize_false,
   2938                          &if_true, &if_false, &fall_through);
   2939 
   2940   __ JumpIfSmi(r0, if_false);
   2941   __ LoadRoot(ip, Heap::kNullValueRootIndex);
   2942   __ cmp(r0, ip);
   2943   __ b(eq, if_true);
   2944   __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
   2945   // Undetectable objects behave like undefined when tested with typeof.
   2946   __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
   2947   __ tst(r1, Operand(1 << Map::kIsUndetectable));
   2948   __ b(ne, if_false);
   2949   __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
   2950   __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
   2951   __ b(lt, if_false);
   2952   __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
   2953   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2954   Split(le, if_true, if_false, fall_through);
   2955 
   2956   context()->Plug(if_true, if_false);
   2957 }
   2958 
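// An illustrative C++ sketch (hypothetical constants and helper name) of
// the test emitted above: %_IsObject holds for null and for
// non-undetectable heap objects whose instance type falls in the
// non-callable spec-object range.
static bool IsObjectSketch(bool is_smi, bool is_null, bool is_undetectable,
                           int instance_type) {
  const int kFirstNonCallable = 0x80;  // Stand-in for the real constant.
  const int kLastNonCallable = 0x81;   // Stand-in for the real constant.
  if (is_smi) return false;
  if (is_null) return true;
  if (is_undetectable) return false;   // Behaves like undefined.
  return instance_type >= kFirstNonCallable &&
         instance_type <= kLastNonCallable;
}
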
   2959 
   2960 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
   2961   ZoneList<Expression*>* args = expr->arguments();
   2962   ASSERT(args->length() == 1);
   2963 
   2964   VisitForAccumulatorValue(args->at(0));
   2965 
   2966   Label materialize_true, materialize_false;
   2967   Label* if_true = NULL;
   2968   Label* if_false = NULL;
   2969   Label* fall_through = NULL;
   2970   context()->PrepareTest(&materialize_true, &materialize_false,
   2971                          &if_true, &if_false, &fall_through);
   2972 
   2973   __ JumpIfSmi(r0, if_false);
   2974   __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
   2975   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2976   Split(ge, if_true, if_false, fall_through);
   2977 
   2978   context()->Plug(if_true, if_false);
   2979 }
   2980 
   2981 
   2982 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
   2983   ZoneList<Expression*>* args = expr->arguments();
   2984   ASSERT(args->length() == 1);
   2985 
   2986   VisitForAccumulatorValue(args->at(0));
   2987 
   2988   Label materialize_true, materialize_false;
   2989   Label* if_true = NULL;
   2990   Label* if_false = NULL;
   2991   Label* fall_through = NULL;
   2992   context()->PrepareTest(&materialize_true, &materialize_false,
   2993                          &if_true, &if_false, &fall_through);
   2994 
   2995   __ JumpIfSmi(r0, if_false);
   2996   __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
   2997   __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
   2998   __ tst(r1, Operand(1 << Map::kIsUndetectable));
   2999   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3000   Split(ne, if_true, if_false, fall_through);
   3001 
   3002   context()->Plug(if_true, if_false);
   3003 }
   3004 
   3005 
   3006 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
   3007     CallRuntime* expr) {
   3008   ZoneList<Expression*>* args = expr->arguments();
   3009   ASSERT(args->length() == 1);
   3010 
   3011   VisitForAccumulatorValue(args->at(0));
   3012 
   3013   Label materialize_true, materialize_false;
   3014   Label* if_true = NULL;
   3015   Label* if_false = NULL;
   3016   Label* fall_through = NULL;
   3017   context()->PrepareTest(&materialize_true, &materialize_false,
   3018                          &if_true, &if_false, &fall_through);
   3019 
   3020   __ AssertNotSmi(r0);
   3021 
   3022   __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
   3023   __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
   3024   __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
   3025   __ b(ne, if_true);
   3026 
   3027   // Check for fast case object. Generate false result for slow case object.
   3028   __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
   3029   __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
   3030   __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
   3031   __ cmp(r2, ip);
   3032   __ b(eq, if_false);
   3033 
  // Look for the valueOf name in the descriptor array, and indicate false
  // if found. Since we omit an enumeration index check, if 'valueOf' was
  // added via a transition that shares the descriptor array, this is a
  // false positive.
   3037   Label entry, loop, done;
   3038 
   3039   // Skip loop if no descriptors are valid.
   3040   __ NumberOfOwnDescriptors(r3, r1);
   3041   __ cmp(r3, Operand::Zero());
   3042   __ b(eq, &done);
   3043 
   3044   __ LoadInstanceDescriptors(r1, r4);
   3045   // r4: descriptor array.
   3046   // r3: valid entries in the descriptor array.
   3047   __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
   3048   __ mul(r3, r3, ip);
   3049   // Calculate location of the first key name.
   3050   __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
   3051   // Calculate the end of the descriptor array.
   3052   __ mov(r2, r4);
   3053   __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
   3054 
   3055   // Loop through all the keys in the descriptor array. If one of these is the
   3056   // string "valueOf" the result is false.
   3057   // The use of ip to store the valueOf string assumes that it is not otherwise
   3058   // used in the loop below.
   3059   __ mov(ip, Operand(isolate()->factory()->value_of_string()));
   3060   __ jmp(&entry);
   3061   __ bind(&loop);
   3062   __ ldr(r3, MemOperand(r4, 0));
   3063   __ cmp(r3, ip);
   3064   __ b(eq, if_false);
   3065   __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
   3066   __ bind(&entry);
   3067   __ cmp(r4, Operand(r2));
   3068   __ b(ne, &loop);
   3069 
   3070   __ bind(&done);
  // If no valueOf property was found on the object, check that its
  // prototype is the unmodified String prototype; if not, the result is
  // false.
   3073   __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
   3074   __ JumpIfSmi(r2, if_false);
   3075   __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
   3076   __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
   3077   __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
   3078   __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
   3079   __ cmp(r2, r3);
   3080   __ b(ne, if_false);
   3081 
  // Set the bit in the map to indicate that the map has been checked as
  // safe for the default valueOf, and set the result to true.
   3084   __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
   3085   __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
   3086   __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
   3087   __ jmp(if_true);
   3088 
   3089   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3090   context()->Plug(if_true, if_false);
   3091 }
   3092 
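// An illustrative sketch (hypothetical types and names) of the scan above:
// walk the map's own descriptors looking for a 'valueOf' key, and only
// treat the wrapper as safe if none is found and the prototype is the
// unmodified String prototype.
static bool IsSafeForDefaultValueOfSketch(const int* descriptor_keys,
                                          int num_own_descriptors,
                                          int value_of_key,
                                          bool prototype_is_pristine) {
  for (int i = 0; i < num_own_descriptors; i++) {
    if (descriptor_keys[i] == value_of_key) return false;  // Shadowed.
  }
  // On success the real code also caches the answer in the map's bits.
  return prototype_is_pristine;
}
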
   3093 
   3094 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
   3095   ZoneList<Expression*>* args = expr->arguments();
   3096   ASSERT(args->length() == 1);
   3097 
   3098   VisitForAccumulatorValue(args->at(0));
   3099 
   3100   Label materialize_true, materialize_false;
   3101   Label* if_true = NULL;
   3102   Label* if_false = NULL;
   3103   Label* fall_through = NULL;
   3104   context()->PrepareTest(&materialize_true, &materialize_false,
   3105                          &if_true, &if_false, &fall_through);
   3106 
   3107   __ JumpIfSmi(r0, if_false);
   3108   __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
   3109   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3110   Split(eq, if_true, if_false, fall_through);
   3111 
   3112   context()->Plug(if_true, if_false);
   3113 }
   3114 
   3115 
   3116 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
   3117   ZoneList<Expression*>* args = expr->arguments();
   3118   ASSERT(args->length() == 1);
   3119 
   3120   VisitForAccumulatorValue(args->at(0));
   3121 
   3122   Label materialize_true, materialize_false;
   3123   Label* if_true = NULL;
   3124   Label* if_false = NULL;
   3125   Label* fall_through = NULL;
   3126   context()->PrepareTest(&materialize_true, &materialize_false,
   3127                          &if_true, &if_false, &fall_through);
   3128 
   3129   __ JumpIfSmi(r0, if_false);
   3130   __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
   3131   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3132   Split(eq, if_true, if_false, fall_through);
   3133 
   3134   context()->Plug(if_true, if_false);
   3135 }
   3136 
   3137 
   3138 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
   3139   ZoneList<Expression*>* args = expr->arguments();
   3140   ASSERT(args->length() == 1);
   3141 
   3142   VisitForAccumulatorValue(args->at(0));
   3143 
   3144   Label materialize_true, materialize_false;
   3145   Label* if_true = NULL;
   3146   Label* if_false = NULL;
   3147   Label* fall_through = NULL;
   3148   context()->PrepareTest(&materialize_true, &materialize_false,
   3149                          &if_true, &if_false, &fall_through);
   3150 
   3151   __ JumpIfSmi(r0, if_false);
   3152   __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
   3153   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3154   Split(eq, if_true, if_false, fall_through);
   3155 
   3156   context()->Plug(if_true, if_false);
   3157 }
   3158 
   3159 
   3161 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
   3162   ASSERT(expr->arguments()->length() == 0);
   3163 
   3164   Label materialize_true, materialize_false;
   3165   Label* if_true = NULL;
   3166   Label* if_false = NULL;
   3167   Label* fall_through = NULL;
   3168   context()->PrepareTest(&materialize_true, &materialize_false,
   3169                          &if_true, &if_false, &fall_through);
   3170 
   3171   // Get the frame pointer for the calling frame.
   3172   __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   3173 
   3174   // Skip the arguments adaptor frame if it exists.
   3175   Label check_frame_marker;
   3176   __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
   3177   __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   3178   __ b(ne, &check_frame_marker);
   3179   __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
   3180 
   3181   // Check the marker in the calling frame.
   3182   __ bind(&check_frame_marker);
   3183   __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
   3184   __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
   3185   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3186   Split(eq, if_true, if_false, fall_through);
   3187 
   3188   context()->Plug(if_true, if_false);
   3189 }
   3190 
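// An illustrative sketch (hypothetical frame representation) of the walk
// above: start from the caller's frame, skip a single arguments adaptor
// frame if one is present, then test whether the frame marker says
// CONSTRUCT.
struct FrameSketch {
  const FrameSketch* caller;
  bool is_arguments_adaptor;
  bool is_construct;
};

static bool IsConstructCallSketch(const FrameSketch* fp) {
  const FrameSketch* calling = fp->caller;
  if (calling->is_arguments_adaptor) calling = calling->caller;
  return calling->is_construct;
}
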
   3191 
   3192 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
   3193   ZoneList<Expression*>* args = expr->arguments();
   3194   ASSERT(args->length() == 2);
   3195 
   3196   // Load the two objects into registers and perform the comparison.
   3197   VisitForStackValue(args->at(0));
   3198   VisitForAccumulatorValue(args->at(1));
   3199 
   3200   Label materialize_true, materialize_false;
   3201   Label* if_true = NULL;
   3202   Label* if_false = NULL;
   3203   Label* fall_through = NULL;
   3204   context()->PrepareTest(&materialize_true, &materialize_false,
   3205                          &if_true, &if_false, &fall_through);
   3206 
   3207   __ pop(r1);
   3208   __ cmp(r0, r1);
   3209   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3210   Split(eq, if_true, if_false, fall_through);
   3211 
   3212   context()->Plug(if_true, if_false);
   3213 }
   3214 
   3215 
   3216 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
   3217   ZoneList<Expression*>* args = expr->arguments();
   3218   ASSERT(args->length() == 1);
   3219 
  // ArgumentsAccessStub expects the key in r1 and the formal
   3221   // parameter count in r0.
   3222   VisitForAccumulatorValue(args->at(0));
   3223   __ mov(r1, r0);
   3224   __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
   3225   ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
   3226   __ CallStub(&stub);
   3227   context()->Plug(r0);
   3228 }
   3229 
   3230 
   3231 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
   3232   ASSERT(expr->arguments()->length() == 0);
   3233   Label exit;
   3234   // Get the number of formal parameters.
   3235   __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
   3236 
   3237   // Check if the calling frame is an arguments adaptor frame.
   3238   __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   3239   __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
   3240   __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   3241   __ b(ne, &exit);
   3242 
   3243   // Arguments adaptor case: Read the arguments length from the
   3244   // adaptor frame.
   3245   __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
   3246 
   3247   __ bind(&exit);
   3248   context()->Plug(r0);
   3249 }
   3250 
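// An illustrative sketch (hypothetical parameters) of the logic above:
// %_ArgumentsLength is the formal parameter count unless the caller came
// through an arguments adaptor frame, in which case the actual argument
// count is read from that frame.
static int ArgumentsLengthSketch(bool has_adaptor_frame, int formal_count,
                                 int adaptor_frame_count) {
  return has_adaptor_frame ? adaptor_frame_count : formal_count;
}
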
   3251 
   3252 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
   3253   ZoneList<Expression*>* args = expr->arguments();
   3254   ASSERT(args->length() == 1);
   3255   Label done, null, function, non_function_constructor;
   3256 
   3257   VisitForAccumulatorValue(args->at(0));
   3258 
   3259   // If the object is a smi, we return null.
   3260   __ JumpIfSmi(r0, &null);
   3261 
   3262   // Check that the object is a JS object but take special care of JS
   3263   // functions to make sure they have 'Function' as their class.
   3264   // Assume that there are only two callable types, and one of them is at
   3265   // either end of the type range for JS object types. Saves extra comparisons.
   3266   STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
   3267   __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
   3268   // Map is now in r0.
   3269   __ b(lt, &null);
   3270   STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
   3271                 FIRST_SPEC_OBJECT_TYPE + 1);
   3272   __ b(eq, &function);
   3273 
   3274   __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
   3275   STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
   3276                 LAST_SPEC_OBJECT_TYPE - 1);
   3277   __ b(eq, &function);
   3278   // Assume that there is no larger type.
   3279   STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
   3280 
   3281   // Check if the constructor in the map is a JS function.
   3282   __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
   3283   __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
   3284   __ b(ne, &non_function_constructor);
   3285 
   3286   // r0 now contains the constructor function. Grab the
   3287   // instance class name from there.
   3288   __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
   3289   __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
   3290   __ b(&done);
   3291 
   3292   // Functions have class 'Function'.
   3293   __ bind(&function);
   3294   __ LoadRoot(r0, Heap::kfunction_class_stringRootIndex);
   3295   __ jmp(&done);
   3296 
   3297   // Objects with a non-function constructor have class 'Object'.
   3298   __ bind(&non_function_constructor);
   3299   __ LoadRoot(r0, Heap::kObject_stringRootIndex);
   3300   __ jmp(&done);
   3301 
   3302   // Non-JS objects have class null.
   3303   __ bind(&null);
   3304   __ LoadRoot(r0, Heap::kNullValueRootIndex);
   3305 
   3306   // All done.
   3307   __ bind(&done);
   3308 
   3309   context()->Plug(r0);
   3310 }
   3311 
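// An illustrative C++ sketch (hypothetical helper name) of the
// classification above: smis and non-spec-objects yield null, callables
// yield "Function", objects whose map's constructor is not a function
// yield "Object", and everything else reports the constructor's instance
// class name.
static const char* ClassOfSketch(bool is_smi, bool is_spec_object,
                                 bool is_callable,
                                 bool constructor_is_function,
                                 const char* instance_class_name) {
  if (is_smi || !is_spec_object) return NULL;  // Class is null.
  if (is_callable) return "Function";
  if (!constructor_is_function) return "Object";
  return instance_class_name;
}
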
   3312 
   3313 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
   3314   // Conditionally generate a log call.
   3315   // Args:
   3316   //   0 (literal string): The type of logging (corresponds to the flags).
   3317   //     This is used to determine whether or not to generate the log call.
   3318   //   1 (string): Format string.  Access the string at argument index 2
   3319   //     with '%2s' (see Logger::LogRuntime for all the formats).
   3320   //   2 (array): Arguments to the format string.
   3321   ZoneList<Expression*>* args = expr->arguments();
   3322   ASSERT_EQ(args->length(), 3);
   3323   if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
   3324     VisitForStackValue(args->at(1));
   3325     VisitForStackValue(args->at(2));
   3326     __ CallRuntime(Runtime::kLog, 2);
   3327   }
   3328 
   3329   // Finally, we're expected to leave a value on the top of the stack.
   3330   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   3331   context()->Plug(r0);
   3332 }
   3333 
   3334 
   3335 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
   3336   ASSERT(expr->arguments()->length() == 0);
   3337   Label slow_allocate_heapnumber;
   3338   Label heapnumber_allocated;
   3339 
   3340   __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
   3341   __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
   3342   __ jmp(&heapnumber_allocated);
   3343 
   3344   __ bind(&slow_allocate_heapnumber);
   3345   // Allocate a heap number.
   3346   __ CallRuntime(Runtime::kNumberAlloc, 0);
   3347   __ mov(r4, Operand(r0));
   3348 
   3349   __ bind(&heapnumber_allocated);
   3350 
   3351   // Convert 32 random bits in r0 to 0.(32 random bits) in a double
   3352   // by computing:
  // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
   3354   __ PrepareCallCFunction(1, r0);
   3355   __ ldr(r0,
   3356          ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
   3357   __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
   3358   __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
   3359 
   3360   // 0x41300000 is the top half of 1.0 x 2^20 as a double.
   3361   // Create this constant using mov/orr to avoid PC relative load.
   3362   __ mov(r1, Operand(0x41000000));
   3363   __ orr(r1, r1, Operand(0x300000));
   3364   // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
   3365   __ vmov(d7, r0, r1);
   3366   // Move 0x4130000000000000 to VFP.
   3367   __ mov(r0, Operand::Zero());
   3368   __ vmov(d8, r0, r1);
   3369   // Subtract and store the result in the heap number.
   3370   __ vsub(d7, d7, d8);
   3371   __ sub(r0, r4, Operand(kHeapObjectTag));
   3372   __ vstr(d7, r0, HeapNumber::kValueOffset);
   3373   __ mov(r0, r4);
   3374 
   3375   context()->Plug(r0);
   3376 }
   3377 
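// An illustrative sketch (not part of this file's emitted code) of the
// bit trick above: placing 32 random bits in the low mantissa word of
// 1.0 x 2^20 and subtracting 1.0 x 2^20 leaves exactly
// 0.(32 random bits). The union-based type punning stands in for the
// vmov/vsub sequence.
static double RandomBitsToDoubleSketch(uint32_t random_bits) {
  union { uint64_t bits; double value; } pun;
  // 0x41300000 is the upper half of 1.0 x 2^20; the random bits fill the
  // low 32 bits of the mantissa.
  pun.bits = (static_cast<uint64_t>(0x41300000) << 32) | random_bits;
  return pun.value - 1048576.0;  // Subtract 1.0 x 2^20 (= 1048576.0).
}
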
   3378 
   3379 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
   3380   // Load the arguments on the stack and call the stub.
   3381   SubStringStub stub;
   3382   ZoneList<Expression*>* args = expr->arguments();
   3383   ASSERT(args->length() == 3);
   3384   VisitForStackValue(args->at(0));
   3385   VisitForStackValue(args->at(1));
   3386   VisitForStackValue(args->at(2));
   3387   __ CallStub(&stub);
   3388   context()->Plug(r0);
   3389 }
   3390 
   3391 
   3392 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
   3393   // Load the arguments on the stack and call the stub.
   3394   RegExpExecStub stub;
   3395   ZoneList<Expression*>* args = expr->arguments();
   3396   ASSERT(args->length() == 4);
   3397   VisitForStackValue(args->at(0));
   3398   VisitForStackValue(args->at(1));
   3399   VisitForStackValue(args->at(2));
   3400   VisitForStackValue(args->at(3));
   3401   __ CallStub(&stub);
   3402   context()->Plug(r0);
   3403 }
   3404 
   3405 
   3406 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
   3407   ZoneList<Expression*>* args = expr->arguments();
   3408   ASSERT(args->length() == 1);
   3409   VisitForAccumulatorValue(args->at(0));  // Load the object.
   3410 
   3411   Label done;
   3412   // If the object is a smi return the object.
   3413   __ JumpIfSmi(r0, &done);
   3414   // If the object is not a value type, return the object.
   3415   __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
   3416   __ b(ne, &done);
   3417   __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
   3418 
   3419   __ bind(&done);
   3420   context()->Plug(r0);
   3421 }
   3422 
   3423 
   3424 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
   3425   ZoneList<Expression*>* args = expr->arguments();
   3426   ASSERT(args->length() == 2);
   3427   ASSERT_NE(NULL, args->at(1)->AsLiteral());
   3428   Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
   3429 
   3430   VisitForAccumulatorValue(args->at(0));  // Load the object.
   3431 
   3432   Label runtime, done, not_date_object;
   3433   Register object = r0;
   3434   Register result = r0;
   3435   Register scratch0 = r9;
   3436   Register scratch1 = r1;
   3437 
   3438   __ JumpIfSmi(object, &not_date_object);
   3439   __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
   3440   __ b(ne, &not_date_object);
   3441 
   3442   if (index->value() == 0) {
   3443     __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
   3444     __ jmp(&done);
   3445   } else {
   3446     if (index->value() < JSDate::kFirstUncachedField) {
   3447       ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
   3448       __ mov(scratch1, Operand(stamp));
   3449       __ ldr(scratch1, MemOperand(scratch1));
   3450       __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
   3451       __ cmp(scratch1, scratch0);
   3452       __ b(ne, &runtime);
   3453       __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
   3454                                              kPointerSize * index->value()));
   3455       __ jmp(&done);
   3456     }
   3457     __ bind(&runtime);
   3458     __ PrepareCallCFunction(2, scratch1);
   3459     __ mov(r1, Operand(index));
   3460     __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
   3461     __ jmp(&done);
   3462   }
   3463 
   3464   __ bind(&not_date_object);
   3465   __ CallRuntime(Runtime::kThrowNotDateError, 0);
   3466   __ bind(&done);
   3467   context()->Plug(r0);
   3468 }
   3469 
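// An illustrative sketch (hypothetical parameters) of the fast path above:
// field 0 is always read directly, other cached fields may be used only
// while the object's cache stamp matches the isolate-wide date cache
// stamp, and everything else goes through the C function.
static double GetDateFieldSketch(const double* fields, int index,
                                 bool is_cached_field,
                                 bool stamp_is_current,
                                 double (*slow_path)(int)) {
  if (index == 0) return fields[0];  // The date value itself.
  if (is_cached_field && stamp_is_current) {
    return fields[index];            // The cached field is still valid.
  }
  return slow_path(index);           // Recompute via the runtime.
}
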
   3470 
   3471 void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
   3472                                                   Register index,
   3473                                                   Register value,
   3474                                                   uint32_t encoding_mask) {
   3475   __ SmiTst(index);
   3476   __ Check(eq, kNonSmiIndex);
   3477   __ SmiTst(value);
   3478   __ Check(eq, kNonSmiValue);
   3479 
   3480   __ ldr(ip, FieldMemOperand(string, String::kLengthOffset));
   3481   __ cmp(index, ip);
   3482   __ Check(lt, kIndexIsTooLarge);
   3483 
   3484   __ cmp(index, Operand(Smi::FromInt(0)));
   3485   __ Check(ge, kIndexIsNegative);
   3486 
   3487   __ ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset));
   3488   __ ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));
   3489 
   3490   __ and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask));
   3491   __ cmp(ip, Operand(encoding_mask));
   3492   __ Check(eq, kUnexpectedStringType);
   3493 }
   3494 
   3495 
   3496 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
   3497   ZoneList<Expression*>* args = expr->arguments();
   3498   ASSERT_EQ(3, args->length());
   3499 
   3500   Register string = r0;
   3501   Register index = r1;
   3502   Register value = r2;
   3503 
   3504   VisitForStackValue(args->at(1));  // index
   3505   VisitForStackValue(args->at(2));  // value
   3506   __ pop(value);
   3507   __ pop(index);
   3508   VisitForAccumulatorValue(args->at(0));  // string
   3509 
   3510   if (FLAG_debug_code) {
   3511     static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
   3512     EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
   3513   }
   3514 
   3515   __ SmiUntag(value, value);
   3516   __ add(ip,
   3517          string,
   3518          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
   3519   __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
   3520   context()->Plug(string);
   3521 }
   3522 
   3523 
   3524 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
   3525   ZoneList<Expression*>* args = expr->arguments();
   3526   ASSERT_EQ(3, args->length());
   3527 
   3528   Register string = r0;
   3529   Register index = r1;
   3530   Register value = r2;
   3531 
   3532   VisitForStackValue(args->at(1));  // index
   3533   VisitForStackValue(args->at(2));  // value
   3534   __ pop(value);
   3535   __ pop(index);
   3536   VisitForAccumulatorValue(args->at(0));  // string
   3537 
   3538   if (FLAG_debug_code) {
   3539     static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
   3540     EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
   3541   }
   3542 
   3543   __ SmiUntag(value, value);
   3544   __ add(ip,
   3545          string,
   3546          Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
   3547   STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
   3548   __ strh(value, MemOperand(ip, index));
   3549   context()->Plug(string);
   3550 }
   3551 
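// An illustrative sketch (hypothetical helper name) of the index
// arithmetic used by the two stores above: with kSmiTagSize == 1 and
// kSmiTag == 0, a smi index is the untagged index shifted left by one, so
// it already equals the byte offset into a two-byte string, while a
// one-byte string needs it shifted back down (the LSR in the strb above).
static unsigned SeqStringByteOffsetSketch(unsigned smi_index,
                                          bool one_byte) {
  return one_byte ? (smi_index >> 1)  // Untag: each char is one byte.
                  : smi_index;        // Tag shift == two-byte scaling.
}
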
   3552 
   3553 
   3554 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
   3555   // Load the arguments on the stack and call the runtime function.
   3556   ZoneList<Expression*>* args = expr->arguments();
   3557   ASSERT(args->length() == 2);
   3558   VisitForStackValue(args->at(0));
   3559   VisitForStackValue(args->at(1));
   3560   MathPowStub stub(MathPowStub::ON_STACK);
   3561   __ CallStub(&stub);
   3562   context()->Plug(r0);
   3563 }
   3564 
   3565 
   3566 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
   3567   ZoneList<Expression*>* args = expr->arguments();
   3568   ASSERT(args->length() == 2);
   3569   VisitForStackValue(args->at(0));  // Load the object.
   3570   VisitForAccumulatorValue(args->at(1));  // Load the value.
   3571   __ pop(r1);  // r0 = value. r1 = object.
   3572 
   3573   Label done;
   3574   // If the object is a smi, return the value.
   3575   __ JumpIfSmi(r1, &done);
   3576 
   3577   // If the object is not a value type, return the value.
   3578   __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
   3579   __ b(ne, &done);
   3580 
   3581   // Store the value.
   3582   __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
   3583   // Update the write barrier.  Save the value as it will be
   3584   // overwritten by the write barrier code and is needed afterward.
   3585   __ mov(r2, r0);
   3586   __ RecordWriteField(
   3587       r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
   3588 
   3589   __ bind(&done);
   3590   context()->Plug(r0);
   3591 }
   3592 
   3593 
   3594 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
   3595   ZoneList<Expression*>* args = expr->arguments();
   3596   ASSERT_EQ(args->length(), 1);
   3597   // Load the argument on the stack and call the stub.
   3598   VisitForStackValue(args->at(0));
   3599 
   3600   NumberToStringStub stub;
   3601   __ CallStub(&stub);
   3602   context()->Plug(r0);
   3603 }
   3604 
   3605 
   3606 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
   3607   ZoneList<Expression*>* args = expr->arguments();
   3608   ASSERT(args->length() == 1);
   3609   VisitForAccumulatorValue(args->at(0));
   3610 
   3611   Label done;
   3612   StringCharFromCodeGenerator generator(r0, r1);
   3613   generator.GenerateFast(masm_);
   3614   __ jmp(&done);
   3615 
   3616   NopRuntimeCallHelper call_helper;
   3617   generator.GenerateSlow(masm_, call_helper);
   3618 
   3619   __ bind(&done);
   3620   context()->Plug(r1);
   3621 }
   3622 
   3623 
   3624 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   3625   ZoneList<Expression*>* args = expr->arguments();
   3626   ASSERT(args->length() == 2);
   3627   VisitForStackValue(args->at(0));
   3628   VisitForAccumulatorValue(args->at(1));
   3629 
   3630   Register object = r1;
   3631   Register index = r0;
   3632   Register result = r3;
   3633 
   3634   __ pop(object);
   3635 
   3636   Label need_conversion;
   3637   Label index_out_of_range;
   3638   Label done;
   3639   StringCharCodeAtGenerator generator(object,
   3640                                       index,
   3641                                       result,
   3642                                       &need_conversion,
   3643                                       &need_conversion,
   3644                                       &index_out_of_range,
   3645                                       STRING_INDEX_IS_NUMBER);
   3646   generator.GenerateFast(masm_);
   3647   __ jmp(&done);
   3648 
   3649   __ bind(&index_out_of_range);
   3650   // When the index is out of range, the spec requires us to return
   3651   // NaN.
   3652   __ LoadRoot(result, Heap::kNanValueRootIndex);
   3653   __ jmp(&done);
   3654 
   3655   __ bind(&need_conversion);
   3656   // Load the undefined value into the result register, which will
   3657   // trigger conversion.
   3658   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
   3659   __ jmp(&done);
   3660 
   3661   NopRuntimeCallHelper call_helper;
   3662   generator.GenerateSlow(masm_, call_helper);
   3663 
   3664   __ bind(&done);
   3665   context()->Plug(result);
   3666 }
   3667 
   3668 
   3669 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
   3670   ZoneList<Expression*>* args = expr->arguments();
   3671   ASSERT(args->length() == 2);
   3672   VisitForStackValue(args->at(0));
   3673   VisitForAccumulatorValue(args->at(1));
   3674 
   3675   Register object = r1;
   3676   Register index = r0;
   3677   Register scratch = r3;
   3678   Register result = r0;
   3679 
   3680   __ pop(object);
   3681 
   3682   Label need_conversion;
   3683   Label index_out_of_range;
   3684   Label done;
   3685   StringCharAtGenerator generator(object,
   3686                                   index,
   3687                                   scratch,
   3688                                   result,
   3689                                   &need_conversion,
   3690                                   &need_conversion,
   3691                                   &index_out_of_range,
   3692                                   STRING_INDEX_IS_NUMBER);
   3693   generator.GenerateFast(masm_);
   3694   __ jmp(&done);
   3695 
   3696   __ bind(&index_out_of_range);
   3697   // When the index is out of range, the spec requires us to return
   3698   // the empty string.
   3699   __ LoadRoot(result, Heap::kempty_stringRootIndex);
   3700   __ jmp(&done);
   3701 
   3702   __ bind(&need_conversion);
   3703   // Move smi zero into the result register, which will trigger
   3704   // conversion.
   3705   __ mov(result, Operand(Smi::FromInt(0)));
   3706   __ jmp(&done);
   3707 
   3708   NopRuntimeCallHelper call_helper;
   3709   generator.GenerateSlow(masm_, call_helper);
   3710 
   3711   __ bind(&done);
   3712   context()->Plug(result);
   3713 }
   3714 
   3715 
   3716 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
   3717   ZoneList<Expression*>* args = expr->arguments();
   3718   ASSERT_EQ(2, args->length());
   3719   VisitForStackValue(args->at(0));
   3720   VisitForStackValue(args->at(1));
   3721 
   3722   StringAddStub stub(STRING_ADD_CHECK_BOTH);
   3723   __ CallStub(&stub);
   3724   context()->Plug(r0);
   3725 }
   3726 
   3727 
   3728 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
   3729   ZoneList<Expression*>* args = expr->arguments();
   3730   ASSERT_EQ(2, args->length());
   3731   VisitForStackValue(args->at(0));
   3732   VisitForStackValue(args->at(1));
   3733 
   3734   StringCompareStub stub;
   3735   __ CallStub(&stub);
   3736   context()->Plug(r0);
   3737 }
   3738 
   3739 
   3740 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
   3741   // Load the argument on the stack and call the stub.
   3742   TranscendentalCacheStub stub(TranscendentalCache::SIN,
   3743                                TranscendentalCacheStub::TAGGED);
   3744   ZoneList<Expression*>* args = expr->arguments();
   3745   ASSERT(args->length() == 1);
   3746   VisitForStackValue(args->at(0));
   3747   __ CallStub(&stub);
   3748   context()->Plug(r0);
   3749 }
   3750 
   3751 
   3752 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
   3753   // Load the argument on the stack and call the stub.
   3754   TranscendentalCacheStub stub(TranscendentalCache::COS,
   3755                                TranscendentalCacheStub::TAGGED);
   3756   ZoneList<Expression*>* args = expr->arguments();
   3757   ASSERT(args->length() == 1);
   3758   VisitForStackValue(args->at(0));
   3759   __ CallStub(&stub);
   3760   context()->Plug(r0);
   3761 }
   3762 
   3763 
   3764 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
   3765   // Load the argument on the stack and call the stub.
   3766   TranscendentalCacheStub stub(TranscendentalCache::TAN,
   3767                                TranscendentalCacheStub::TAGGED);
   3768   ZoneList<Expression*>* args = expr->arguments();
   3769   ASSERT(args->length() == 1);
   3770   VisitForStackValue(args->at(0));
   3771   __ CallStub(&stub);
   3772   context()->Plug(r0);
   3773 }
   3774 
   3775 
   3776 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
   3777   // Load the argument on the stack and call the stub.
   3778   TranscendentalCacheStub stub(TranscendentalCache::LOG,
   3779                                TranscendentalCacheStub::TAGGED);
   3780   ZoneList<Expression*>* args = expr->arguments();
   3781   ASSERT(args->length() == 1);
   3782   VisitForStackValue(args->at(0));
   3783   __ CallStub(&stub);
   3784   context()->Plug(r0);
   3785 }
   3786 
   3787 
   3788 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
   3789   // Load the argument on the stack and call the runtime function.
   3790   ZoneList<Expression*>* args = expr->arguments();
   3791   ASSERT(args->length() == 1);
   3792   VisitForStackValue(args->at(0));
   3793   __ CallRuntime(Runtime::kMath_sqrt, 1);
   3794   context()->Plug(r0);
   3795 }
   3796 
   3797 
   3798 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
   3799   ZoneList<Expression*>* args = expr->arguments();
   3800   ASSERT(args->length() >= 2);
   3801 
   3802   int arg_count = args->length() - 2;  // 2 ~ receiver and function.
   3803   for (int i = 0; i < arg_count + 1; i++) {
   3804     VisitForStackValue(args->at(i));
   3805   }
   3806   VisitForAccumulatorValue(args->last());  // Function.
   3807 
   3808   Label runtime, done;
   3809   // Check for non-function argument (including proxy).
   3810   __ JumpIfSmi(r0, &runtime);
   3811   __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
   3812   __ b(ne, &runtime);
   3813 
   3814   // InvokeFunction requires the function in r1. Move it in there.
   3815   __ mov(r1, result_register());
   3816   ParameterCount count(arg_count);
   3817   __ InvokeFunction(r1, count, CALL_FUNCTION,
   3818                     NullCallWrapper(), CALL_AS_METHOD);
   3819   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   3820   __ jmp(&done);
   3821 
   3822   __ bind(&runtime);
   3823   __ push(r0);
   3824   __ CallRuntime(Runtime::kCall, args->length());
   3825   __ bind(&done);
   3826 
   3827   context()->Plug(r0);
   3828 }
   3829 
   3830 
   3831 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
   3832   RegExpConstructResultStub stub;
   3833   ZoneList<Expression*>* args = expr->arguments();
   3834   ASSERT(args->length() == 3);
   3835   VisitForStackValue(args->at(0));
   3836   VisitForStackValue(args->at(1));
   3837   VisitForStackValue(args->at(2));
   3838   __ CallStub(&stub);
   3839   context()->Plug(r0);
   3840 }
   3841 
   3842 
   3843 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
   3844   ZoneList<Expression*>* args = expr->arguments();
   3845   ASSERT_EQ(2, args->length());
   3846   ASSERT_NE(NULL, args->at(0)->AsLiteral());
   3847   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
   3848 
   3849   Handle<FixedArray> jsfunction_result_caches(
   3850       isolate()->native_context()->jsfunction_result_caches());
   3851   if (jsfunction_result_caches->length() <= cache_id) {
   3852     __ Abort(kAttemptToUseUndefinedCache);
   3853     __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   3854     context()->Plug(r0);
   3855     return;
   3856   }
   3857 
   3858   VisitForAccumulatorValue(args->at(1));
   3859 
   3860   Register key = r0;
   3861   Register cache = r1;
   3862   __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
   3863   __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
   3864   __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
   3865   __ ldr(cache,
   3866          FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
   3867 
  Label done, not_found;
   3871   __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
   3872   // r2 now holds finger offset as a smi.
   3873   __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   3874   // r3 now points to the start of fixed array elements.
   3875   __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
   3876   // Note side effect of PreIndex: r3 now points to the key of the pair.
   3877   __ cmp(key, r2);
   3878   __ b(ne, &not_found);
   3879 
   3880   __ ldr(r0, MemOperand(r3, kPointerSize));
   3881   __ b(&done);
   3882 
   3883   __ bind(&not_found);
   3884   // Call runtime to perform the lookup.
   3885   __ Push(cache, key);
   3886   __ CallRuntime(Runtime::kGetFromCache, 2);
   3887 
   3888   __ bind(&done);
   3889   context()->Plug(r0);
   3890 }
   3891 
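// An illustrative sketch (hypothetical layout) of the fast path above:
// the cache keeps a "finger" at the most recently used key/value pair,
// and the generated code probes only that pair before falling back to
// Runtime::kGetFromCache.
struct ResultCacheSketch {
  int finger;       // Element index of the cached key.
  int entries[16];  // Alternating key/value pairs.
};

static bool ProbeResultCacheSketch(const ResultCacheSketch* cache, int key,
                                   int* value_out) {
  if (cache->entries[cache->finger] == key) {
    *value_out = cache->entries[cache->finger + 1];  // Hit at the finger.
    return true;
  }
  return false;  // Miss: the runtime performs the full lookup.
}
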
   3892 
   3893 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
   3894   ZoneList<Expression*>* args = expr->arguments();
   3895   ASSERT_EQ(2, args->length());
   3896 
   3897   Register right = r0;
   3898   Register left = r1;
   3899   Register tmp = r2;
   3900   Register tmp2 = r3;
   3901 
   3902   VisitForStackValue(args->at(0));
   3903   VisitForAccumulatorValue(args->at(1));
   3904   __ pop(left);
   3905 
   3906   Label done, fail, ok;
   3907   __ cmp(left, Operand(right));
   3908   __ b(eq, &ok);
   3909   // Fail if either is a non-HeapObject.
   3910   __ and_(tmp, left, Operand(right));
   3911   __ JumpIfSmi(tmp, &fail);
   3912   __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
   3913   __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
   3914   __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
   3915   __ b(ne, &fail);
   3916   __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
   3917   __ cmp(tmp, Operand(tmp2));
   3918   __ b(ne, &fail);
   3919   __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
   3920   __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
   3921   __ cmp(tmp, tmp2);
   3922   __ b(eq, &ok);
   3923   __ bind(&fail);
   3924   __ LoadRoot(r0, Heap::kFalseValueRootIndex);
   3925   __ jmp(&done);
   3926   __ bind(&ok);
   3927   __ LoadRoot(r0, Heap::kTrueValueRootIndex);
   3928   __ bind(&done);
   3929 
   3930   context()->Plug(r0);
   3931 }
   3932 
   3933 
   3934 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
   3935   ZoneList<Expression*>* args = expr->arguments();
   3936   VisitForAccumulatorValue(args->at(0));
   3937 
   3938   Label materialize_true, materialize_false;
   3939   Label* if_true = NULL;
   3940   Label* if_false = NULL;
   3941   Label* fall_through = NULL;
   3942   context()->PrepareTest(&materialize_true, &materialize_false,
   3943                          &if_true, &if_false, &fall_through);
   3944 
   3945   __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
   3946   __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
   3947   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3948   Split(eq, if_true, if_false, fall_through);
   3949 
   3950   context()->Plug(if_true, if_false);
   3951 }
   3952 
   3953 
   3954 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
   3955   ZoneList<Expression*>* args = expr->arguments();
   3956   ASSERT(args->length() == 1);
   3957   VisitForAccumulatorValue(args->at(0));
   3958 
   3959   __ AssertString(r0);
   3960 
   3961   __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
   3962   __ IndexFromHash(r0, r0);
   3963 
   3964   context()->Plug(r0);
   3965 }
   3966 
   3967 
   3968 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
   3969   Label bailout, done, one_char_separator, long_separator,
   3970       non_trivial_array, not_size_one_array, loop,
   3971       empty_separator_loop, one_char_separator_loop,
   3972       one_char_separator_loop_entry, long_separator_loop;
   3973   ZoneList<Expression*>* args = expr->arguments();
   3974   ASSERT(args->length() == 2);
   3975   VisitForStackValue(args->at(1));
   3976   VisitForAccumulatorValue(args->at(0));
   3977 
   3978   // All aliases of the same register have disjoint lifetimes.
   3979   Register array = r0;
   3980   Register elements = no_reg;  // Will be r0.
   3981   Register result = no_reg;  // Will be r0.
   3982   Register separator = r1;
   3983   Register array_length = r2;
  Register result_pos = no_reg;  // Will be r2.
   3985   Register string_length = r3;
   3986   Register string = r4;
   3987   Register element = r5;
   3988   Register elements_end = r6;
   3989   Register scratch1 = r7;
   3990   Register scratch2 = r9;
   3991 
   3992   // Separator operand is on the stack.
   3993   __ pop(separator);
   3994 
   3995   // Check that the array is a JSArray.
   3996   __ JumpIfSmi(array, &bailout);
   3997   __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
   3998   __ b(ne, &bailout);
   3999 
   4000   // Check that the array has fast elements.
   4001   __ CheckFastElements(scratch1, scratch2, &bailout);
   4002 
   4003   // If the array has length zero, return the empty string.
   4004   __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
   4005   __ SmiUntag(array_length, SetCC);
   4006   __ b(ne, &non_trivial_array);
   4007   __ LoadRoot(r0, Heap::kempty_stringRootIndex);
   4008   __ b(&done);
   4009 
   4010   __ bind(&non_trivial_array);
   4011 
   4012   // Get the FixedArray containing array's elements.
   4013   elements = array;
   4014   __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
   4015   array = no_reg;  // End of array's live range.
   4016 
   4017   // Check that all array elements are sequential ASCII strings, and
   4018   // accumulate the sum of their lengths, as a smi-encoded value.
   4019   __ mov(string_length, Operand::Zero());
   4020   __ add(element,
   4021          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   4022   __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
   4023   // Loop condition: while (element < elements_end).
   4024   // Live values in registers:
   4025   //   elements: Fixed array of strings.
   4026   //   array_length: Length of the fixed array of strings (not smi)
   4027   //   separator: Separator string
   4028   //   string_length: Accumulated sum of string lengths (smi).
   4029   //   element: Current array element.
   4030   //   elements_end: Array end.
   4031   if (generate_debug_code_) {
   4032     __ cmp(array_length, Operand::Zero());
   4033     __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
   4034   }
   4035   __ bind(&loop);
   4036   __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
   4037   __ JumpIfSmi(string, &bailout);
   4038   __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
   4039   __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
   4040   __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
   4041   __ ldr(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
   4042   __ add(string_length, string_length, Operand(scratch1), SetCC);
   4043   __ b(vs, &bailout);
   4044   __ cmp(element, elements_end);
   4045   __ b(lt, &loop);
   4046 
   4047   // If array_length is 1, return elements[0], a string.
   4048   __ cmp(array_length, Operand(1));
   4049   __ b(ne, &not_size_one_array);
   4050   __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
   4051   __ b(&done);
   4052 
   4053   __ bind(&not_size_one_array);
   4054 
   4055   // Live values in registers:
   4056   //   separator: Separator string
   4057   //   array_length: Length of the array.
   4058   //   string_length: Sum of string lengths (smi).
   4059   //   elements: FixedArray of strings.
   4060 
   4061   // Check that the separator is a flat ASCII string.
   4062   __ JumpIfSmi(separator, &bailout);
   4063   __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
   4064   __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
   4065   __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
   4066 
   4067   // Add (separator length times array_length) - separator length to the
   4068   // string_length to get the length of the result string. array_length is
   4069   // not a smi but the other values are, so the result is a smi.
   4070   __ ldr(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
   4071   __ sub(string_length, string_length, Operand(scratch1));
   4072   __ smull(scratch2, ip, array_length, scratch1);
   4073   // Check for smi overflow. There is no overflow if the upper 33 bits of
   4074   // the 64-bit result are zero.
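          // Worked example: a 100-element array with a three-character
          // separator has scratch1 == 6 (the smi encoding of 3), so smull
          // produces 100 * 6 = 600: ip is 0 and bit 31 of scratch2 is clear,
          // so the product is already a valid positive smi and both bailout
          // branches below fall through.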
   4075   __ cmp(ip, Operand::Zero());
   4076   __ b(ne, &bailout);
   4077   __ tst(scratch2, Operand(0x80000000));
   4078   __ b(ne, &bailout);
   4079   __ add(string_length, string_length, Operand(scratch2), SetCC);
   4080   __ b(vs, &bailout);
   4081   __ SmiUntag(string_length);
   4082 
   4083   // Get first element in the array to free up the elements register to be used
   4084   // for the result.
   4085   __ add(element,
   4086          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   4087   result = elements;  // End of live range for elements.
   4088   elements = no_reg;
   4089   // Live values in registers:
   4090   //   element: First array element
   4091   //   separator: Separator string
   4092   //   string_length: Length of result string (not smi)
   4093   //   array_length: Length of the array.
   4094   __ AllocateAsciiString(result,
   4095                          string_length,
   4096                          scratch1,
   4097                          scratch2,
   4098                          elements_end,
   4099                          &bailout);
   4100   // Prepare for looping. Set up elements_end to point just past the end
   4101   // of the array, and result_pos to the position in the result string
   4102   // where the first character will be written.
   4103   __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
   4104   result_pos = array_length;  // End of live range for array_length.
   4105   array_length = no_reg;
   4106   __ add(result_pos,
   4107          result,
   4108          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
   4109 
   4110   // Check the length of the separator.
   4111   __ ldr(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
   4112   __ cmp(scratch1, Operand(Smi::FromInt(1)));
   4113   __ b(eq, &one_char_separator);
   4114   __ b(gt, &long_separator);
   4115 
   4116   // Empty separator case
   4117   __ bind(&empty_separator_loop);
   4118   // Live values in registers:
   4119   //   result_pos: the position to which we are currently copying characters.
   4120   //   element: Current array element.
   4121   //   elements_end: Array end.
   4122 
   4123   // Copy next array element to the result.
   4124   __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
   4125   __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
   4126   __ SmiUntag(string_length);
   4127   __ add(string,
   4128          string,
   4129          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
   4130   __ CopyBytes(string, result_pos, string_length, scratch1);
   4131   __ cmp(element, elements_end);
   4132   __ b(lt, &empty_separator_loop);  // End while (element < elements_end).
   4133   ASSERT(result.is(r0));
   4134   __ b(&done);
   4135 
   4136   // One-character separator case
   4137   __ bind(&one_char_separator);
   4138   // Replace separator with its ASCII character value.
   4139   __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
   4140   // Jump into the loop after the code that copies the separator, so the first
   4141   // element is not preceded by a separator.
   4142   __ jmp(&one_char_separator_loop_entry);
   4143 
   4144   __ bind(&one_char_separator_loop);
   4145   // Live values in registers:
   4146   //   result_pos: the position to which we are currently copying characters.
   4147   //   element: Current array element.
   4148   //   elements_end: Array end.
   4149   //   separator: Single separator ASCII char (in lower byte).
   4150 
   4151   // Copy the separator character to the result.
   4152   __ strb(separator, MemOperand(result_pos, 1, PostIndex));
   4153 
   4154   // Copy next array element to the result.
   4155   __ bind(&one_char_separator_loop_entry);
   4156   __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
   4157   __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
   4158   __ SmiUntag(string_length);
   4159   __ add(string,
   4160          string,
   4161          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
   4162   __ CopyBytes(string, result_pos, string_length, scratch1);
   4163   __ cmp(element, elements_end);
   4164   __ b(lt, &one_char_separator_loop);  // End while (element < elements_end).
   4165   ASSERT(result.is(r0));
   4166   __ b(&done);
   4167 
   4168   // Long separator case (separator is more than one character). Entry is at the
   4169   // label long_separator below.
   4170   __ bind(&long_separator_loop);
   4171   // Live values in registers:
   4172   //   result_pos: the position to which we are currently copying characters.
   4173   //   element: Current array element.
   4174   //   elements_end: Array end.
   4175   //   separator: Separator string.
   4176 
   4177   // Copy the separator to the result.
   4178   __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
   4179   __ SmiUntag(string_length);
   4180   __ add(string,
   4181          separator,
   4182          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
   4183   __ CopyBytes(string, result_pos, string_length, scratch1);
   4184 
   4185   __ bind(&long_separator);
   4186   __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
   4187   __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
   4188   __ SmiUntag(string_length);
   4189   __ add(string,
   4190          string,
   4191          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
   4192   __ CopyBytes(string, result_pos, string_length, scratch1);
   4193   __ cmp(element, elements_end);
   4194   __ b(lt, &long_separator_loop);  // End while (element < elements_end).
   4195   ASSERT(result.is(r0));
   4196   __ b(&done);
   4197 
   4198   __ bind(&bailout);
   4199   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   4200   __ bind(&done);
   4201   context()->Plug(r0);
   4202 }
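// The code above is a hand-scheduled version of the following control flow.
// A C++ sketch under the same assumptions (every element is a flat ASCII
// string and anything unexpected bails out to the generic runtime join):
//
//   static std::string FastAsciiArrayJoinSketch(
//       const std::vector<std::string>& elements, const std::string& sep) {
//     if (elements.empty()) return std::string();    // empty_string fast path
//     if (elements.size() == 1) return elements[0];  // not_size_one_array
//     size_t length = sep.size() * (elements.size() - 1);
//     for (size_t i = 0; i < elements.size(); i++) length += elements[i].size();
//     std::string result;
//     result.reserve(length);                        // AllocateAsciiString
//     result += elements[0];                         // no separator before first
//     for (size_t i = 1; i < elements.size(); i++) {
//       result += sep;                               // the specialized step
//       result += elements[i];
//     }
//     return result;
//   }
//
// The three separate loops in the generated code exist only to specialize the
// "result += sep" step: it vanishes for an empty separator and becomes a
// single byte store for a one-character separator.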
   4203 
   4204 
   4205 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
   4206   Handle<String> name = expr->name();
   4207   if (name->length() > 0 && name->Get(0) == '_') {
   4208     Comment cmnt(masm_, "[ InlineRuntimeCall");
   4209     EmitInlineRuntimeCall(expr);
   4210     return;
   4211   }
   4212 
   4213   Comment cmnt(masm_, "[ CallRuntime");
   4214   ZoneList<Expression*>* args = expr->arguments();
   4215 
   4216   if (expr->is_jsruntime()) {
   4217     // Prepare for calling JS runtime function.
   4218     __ ldr(r0, GlobalObjectOperand());
   4219     __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
   4220     __ push(r0);
   4221   }
   4222 
   4223   // Push the arguments ("left-to-right").
   4224   int arg_count = args->length();
   4225   for (int i = 0; i < arg_count; i++) {
   4226     VisitForStackValue(args->at(i));
   4227   }
   4228 
   4229   if (expr->is_jsruntime()) {
   4230     // Call the JS runtime function.
   4231     __ mov(r2, Operand(expr->name()));
   4232     RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
   4233     Handle<Code> ic =
   4234         isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
   4235     CallIC(ic, mode, expr->CallRuntimeFeedbackId());
   4236     // Restore context register.
   4237     __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   4238   } else {
   4239     // Call the C runtime function.
   4240     __ CallRuntime(expr->function(), arg_count);
   4241   }
   4242   context()->Plug(r0);
   4243 }
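// The dispatch above is purely name-driven: a leading '_' selects an inlined
// intrinsic, expr->is_jsruntime() selects a call through the builtins object,
// and everything else goes to the C++ runtime. A sketch of that triage (the
// enum and function names here are hypothetical):
//
//   enum RuntimeCallKind { INLINE_INTRINSIC, JS_RUNTIME, C_RUNTIME };
//
//   static RuntimeCallKind ClassifyRuntimeCall(const char* name,
//                                              bool is_jsruntime) {
//     if (name != NULL && name[0] == '_') return INLINE_INTRINSIC;
//     return is_jsruntime ? JS_RUNTIME : C_RUNTIME;
//   }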
   4244 
   4245 
   4246 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
   4247   switch (expr->op()) {
   4248     case Token::DELETE: {
   4249       Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
   4250       Property* property = expr->expression()->AsProperty();
   4251       VariableProxy* proxy = expr->expression()->AsVariableProxy();
   4252 
   4253       if (property != NULL) {
   4254         VisitForStackValue(property->obj());
   4255         VisitForStackValue(property->key());
   4256         StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
   4257             ? kNonStrictMode : kStrictMode;
   4258         __ mov(r1, Operand(Smi::FromInt(strict_mode_flag)));
   4259         __ push(r1);
   4260         __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
   4261         context()->Plug(r0);
   4262       } else if (proxy != NULL) {
   4263         Variable* var = proxy->var();
   4264         // Delete of an unqualified identifier is disallowed in strict mode
   4265         // but "delete this" is allowed.
   4266         ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
   4267         if (var->IsUnallocated()) {
   4268           __ ldr(r2, GlobalObjectOperand());
   4269           __ mov(r1, Operand(var->name()));
   4270           __ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
   4271           __ Push(r2, r1, r0);
   4272           __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
   4273           context()->Plug(r0);
   4274         } else if (var->IsStackAllocated() || var->IsContextSlot()) {
   4275           // Result of deleting non-global, non-dynamic variables is false.
   4276           // The subexpression does not have side effects.
   4277           context()->Plug(var->is_this());
   4278         } else {
   4279           // Non-global variable.  Call the runtime to try to delete from the
   4280           // context where the variable was introduced.
   4281           __ push(context_register());
   4282           __ mov(r2, Operand(var->name()));
   4283           __ push(r2);
   4284           __ CallRuntime(Runtime::kDeleteContextSlot, 2);
   4285           context()->Plug(r0);
   4286         }
   4287       } else {
   4288         // Result of deleting non-property, non-variable reference is true.
   4289         // The subexpression may have side effects.
   4290         VisitForEffect(expr->expression());
   4291         context()->Plug(true);
   4292       }
   4293       break;
   4294     }
   4295 
   4296     case Token::VOID: {
   4297       Comment cmnt(masm_, "[ UnaryOperation (VOID)");
   4298       VisitForEffect(expr->expression());
   4299       context()->Plug(Heap::kUndefinedValueRootIndex);
   4300       break;
   4301     }
   4302 
   4303     case Token::NOT: {
   4304       Comment cmnt(masm_, "[ UnaryOperation (NOT)");
   4305       if (context()->IsEffect()) {
   4306         // Unary NOT has no side effects so it's only necessary to visit the
   4307         // subexpression.  Match the optimizing compiler by not branching.
   4308         VisitForEffect(expr->expression());
   4309       } else if (context()->IsTest()) {
   4310         const TestContext* test = TestContext::cast(context());
   4311         // The labels are swapped for the recursive call.
   4312         VisitForControl(expr->expression(),
   4313                         test->false_label(),
   4314                         test->true_label(),
   4315                         test->fall_through());
   4316         context()->Plug(test->true_label(), test->false_label());
   4317       } else {
   4318         // We handle value contexts explicitly rather than simply visiting
   4319         // for control and plugging the control flow into the context,
   4320         // because we need to prepare a pair of extra administrative AST ids
   4321         // for the optimizing compiler.
   4322         ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
   4323         Label materialize_true, materialize_false, done;
   4324         VisitForControl(expr->expression(),
   4325                         &materialize_false,
   4326                         &materialize_true,
   4327                         &materialize_true);
   4328         __ bind(&materialize_true);
   4329         PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
   4330         __ LoadRoot(r0, Heap::kTrueValueRootIndex);
   4331         if (context()->IsStackValue()) __ push(r0);
   4332         __ jmp(&done);
   4333         __ bind(&materialize_false);
   4334         PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
   4335         __ LoadRoot(r0, Heap::kFalseValueRootIndex);
   4336         if (context()->IsStackValue()) __ push(r0);
   4337         __ bind(&done);
   4338       }
   4339       break;
   4340     }
   4341 
   4342     case Token::TYPEOF: {
   4343       Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
   4344       { StackValueContext context(this);
   4345         VisitForTypeofValue(expr->expression());
   4346       }
   4347       __ CallRuntime(Runtime::kTypeof, 1);
   4348       context()->Plug(r0);
   4349       break;
   4350     }
   4351 
   4352     default:
   4353       UNREACHABLE();
   4354   }
   4355 }
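// For '!' in a test context the generator never materializes a boolean value:
// it simply compiles the operand with the true and false labels exchanged.
// A sketch of that identity with a toy control-flow visitor (the ExprSketch
// type and the callbacks are illustrative only):
//
//   struct ExprSketch { bool is_not; const ExprSketch* operand; bool value; };
//
//   static void VisitForControlSketch(const ExprSketch* e,
//                                     void (*if_true)(), void (*if_false)()) {
//     if (e->is_not) {
//       VisitForControlSketch(e->operand, if_false, if_true);  // swap labels
//       return;
//     }
//     (e->value ? if_true : if_false)();  // leaf: branch directly
//   }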
   4356 
   4357 
   4358 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
   4359   Comment cmnt(masm_, "[ CountOperation");
   4360   SetSourcePosition(expr->position());
   4361 
   4362   // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
   4363   // as the left-hand side.
   4364   if (!expr->expression()->IsValidLeftHandSide()) {
   4365     VisitForEffect(expr->expression());
   4366     return;
   4367   }
   4368 
   4369   // Expression can only be a property, a global or a (parameter or local)
   4370   // slot.
   4371   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
   4372   LhsKind assign_type = VARIABLE;
   4373   Property* prop = expr->expression()->AsProperty();
   4374   // In case of a property we use the uninitialized expression context
   4375   // of the key to detect a named property.
   4376   if (prop != NULL) {
   4377     assign_type =
   4378         (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
   4379   }
   4380 
   4381   // Evaluate expression and get value.
   4382   if (assign_type == VARIABLE) {
   4383     ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
   4384     AccumulatorValueContext context(this);
   4385     EmitVariableLoad(expr->expression()->AsVariableProxy());
   4386   } else {
   4387     // Reserve space for result of postfix operation.
   4388     if (expr->is_postfix() && !context()->IsEffect()) {
   4389       __ mov(ip, Operand(Smi::FromInt(0)));
   4390       __ push(ip);
   4391     }
   4392     if (assign_type == NAMED_PROPERTY) {
   4393       // Put the object both on the stack and in the accumulator.
   4394       VisitForAccumulatorValue(prop->obj());
   4395       __ push(r0);
   4396       EmitNamedPropertyLoad(prop);
   4397     } else {
   4398       VisitForStackValue(prop->obj());
   4399       VisitForAccumulatorValue(prop->key());
   4400       __ ldr(r1, MemOperand(sp, 0));
   4401       __ push(r0);
   4402       EmitKeyedPropertyLoad(prop);
   4403     }
   4404   }
   4405 
   4406   // We need a second deoptimization point after loading the value
   4407   // in case evaluating the property load may have a side effect.
   4408   if (assign_type == VARIABLE) {
   4409     PrepareForBailout(expr->expression(), TOS_REG);
   4410   } else {
   4411     PrepareForBailoutForId(prop->LoadId(), TOS_REG);
   4412   }
   4413 
   4414   // Call ToNumber only if operand is not a smi.
   4415   Label no_conversion;
   4416   if (ShouldInlineSmiCase(expr->op())) {
   4417     __ JumpIfSmi(r0, &no_conversion);
   4418   }
   4419   ToNumberStub convert_stub;
   4420   __ CallStub(&convert_stub);
   4421   __ bind(&no_conversion);
   4422 
   4423   // Save result for postfix expressions.
   4424   if (expr->is_postfix()) {
   4425     if (!context()->IsEffect()) {
   4426       // Save the result on the stack. If we have a named or keyed property
   4427       // we store the result under the receiver that is currently on top
   4428       // of the stack.
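              // The stores below target the placeholder slot reserved
              // earlier: for a named property the stack is
              // [placeholder, receiver], for a keyed property it is
              // [placeholder, receiver, key], giving the one- and two-word
              // offsets.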
   4429       switch (assign_type) {
   4430         case VARIABLE:
   4431           __ push(r0);
   4432           break;
   4433         case NAMED_PROPERTY:
   4434           __ str(r0, MemOperand(sp, kPointerSize));
   4435           break;
   4436         case KEYED_PROPERTY:
   4437           __ str(r0, MemOperand(sp, 2 * kPointerSize));
   4438           break;
   4439       }
   4440     }
   4441   }
   4442 
   4443 
   4444   // Inline smi case if we are in a loop.
   4445   Label stub_call, done;
   4446   JumpPatchSite patch_site(masm_);
   4447 
   4448   int count_value = expr->op() == Token::INC ? 1 : -1;
   4449   if (ShouldInlineSmiCase(expr->op())) {
   4450     __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
   4451     __ b(vs, &stub_call);
   4452     // We could eliminate this smi check if we split the code at
   4453     // the first smi check before calling ToNumber.
   4454     patch_site.EmitJumpIfSmi(r0, &done);
   4455 
   4456     __ bind(&stub_call);
   4457     // Call stub. Undo operation first.
   4458     __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
   4459   }
   4460   __ mov(r1, r0);
   4461   __ mov(r0, Operand(Smi::FromInt(count_value)));
   4462 
   4463   // Record position before stub call.
   4464   SetSourcePosition(expr->position());
   4465 
   4466   BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
   4467   CallIC(stub.GetCode(isolate()),
   4468          RelocInfo::CODE_TARGET,
   4469          expr->CountBinOpFeedbackId());
   4470   patch_site.EmitPatchInfo();
   4471   __ bind(&done);
   4472 
   4473   // Store the value returned in r0.
   4474   switch (assign_type) {
   4475     case VARIABLE:
   4476       if (expr->is_postfix()) {
   4477         { EffectContext context(this);
   4478           EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
   4479                                  Token::ASSIGN);
   4480           PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   4481           context.Plug(r0);
   4482         }
   4483         // For all contexts except EffectContext we have the result on
   4484         // top of the stack.
   4485         if (!context()->IsEffect()) {
   4486           context()->PlugTOS();
   4487         }
   4488       } else {
   4489         EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
   4490                                Token::ASSIGN);
   4491         PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   4492         context()->Plug(r0);
   4493       }
   4494       break;
   4495     case NAMED_PROPERTY: {
   4496       __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
   4497       __ pop(r1);
   4498       Handle<Code> ic = is_classic_mode()
   4499           ? isolate()->builtins()->StoreIC_Initialize()
   4500           : isolate()->builtins()->StoreIC_Initialize_Strict();
   4501       CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
   4502       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   4503       if (expr->is_postfix()) {
   4504         if (!context()->IsEffect()) {
   4505           context()->PlugTOS();
   4506         }
   4507       } else {
   4508         context()->Plug(r0);
   4509       }
   4510       break;
   4511     }
   4512     case KEYED_PROPERTY: {
   4513       __ pop(r1);  // Key.
   4514       __ pop(r2);  // Receiver.
   4515       Handle<Code> ic = is_classic_mode()
   4516           ? isolate()->builtins()->KeyedStoreIC_Initialize()
   4517           : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
   4518       CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
   4519       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   4520       if (expr->is_postfix()) {
   4521         if (!context()->IsEffect()) {
   4522           context()->PlugTOS();
   4523         }
   4524       } else {
   4525         context()->Plug(r0);
   4526       }
   4527       break;
   4528     }
   4529   }
   4530 }
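// The bookkeeping above implements the ordinary postfix rule: the old value
// is parked on the stack before the add so that it, not the incremented
// value, becomes the expression's result. The same ordering in plain C++:
//
//   static int PostfixIncrementSketch(int* slot) {
//     int old_value = *slot;   // the copy saved on the stack
//     *slot = old_value + 1;   // the assignment through the store IC
//     return old_value;        // what the surrounding context receives
//   }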
   4531 
   4532 
   4533 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
   4534   ASSERT(!context()->IsEffect());
   4535   ASSERT(!context()->IsTest());
   4536   VariableProxy* proxy = expr->AsVariableProxy();
   4537   if (proxy != NULL && proxy->var()->IsUnallocated()) {
   4538     Comment cmnt(masm_, "Global variable");
   4539     __ ldr(r0, GlobalObjectOperand());
   4540     __ mov(r2, Operand(proxy->name()));
   4541     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   4542     // Use a regular load, not a contextual load, to avoid a reference
   4543     // error: typeof of an undeclared global must yield "undefined", not throw.
   4544     CallIC(ic);
   4545     PrepareForBailout(expr, TOS_REG);
   4546     context()->Plug(r0);
   4547   } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
   4548     Label done, slow;
   4549 
   4550     // Generate code for loading from variables potentially shadowed
   4551     // by eval-introduced variables.
   4552     EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
   4553 
   4554     __ bind(&slow);
   4555     __ mov(r0, Operand(proxy->name()));
   4556     __ Push(cp, r0);
   4557     __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
   4558     PrepareForBailout(expr, TOS_REG);
   4559     __ bind(&done);
   4560 
   4561     context()->Plug(r0);
   4562   } else {
   4563     // This expression cannot throw a reference error at the top level.
   4564     VisitInDuplicateContext(expr);
   4565   }
   4566 }
   4567 
   4568 
   4569 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
   4570                                                  Expression* sub_expr,
   4571                                                  Handle<String> check) {
   4572   Label materialize_true, materialize_false;
   4573   Label* if_true = NULL;
   4574   Label* if_false = NULL;
   4575   Label* fall_through = NULL;
   4576   context()->PrepareTest(&materialize_true, &materialize_false,
   4577                          &if_true, &if_false, &fall_through);
   4578 
   4579   { AccumulatorValueContext context(this);
   4580     VisitForTypeofValue(sub_expr);
   4581   }
   4582   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   4583 
   4584   if (check->Equals(isolate()->heap()->number_string())) {
   4585     __ JumpIfSmi(r0, if_true);
   4586     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
   4587     __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
   4588     __ cmp(r0, ip);
   4589     Split(eq, if_true, if_false, fall_through);
   4590   } else if (check->Equals(isolate()->heap()->string_string())) {
   4591     __ JumpIfSmi(r0, if_false);
   4592     // Check for undetectable objects => false.
   4593     __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
   4594     __ b(ge, if_false);
   4595     __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
   4596     __ tst(r1, Operand(1 << Map::kIsUndetectable));
   4597     Split(eq, if_true, if_false, fall_through);
   4598   } else if (check->Equals(isolate()->heap()->symbol_string())) {
   4599     __ JumpIfSmi(r0, if_false);
   4600     __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
   4601     Split(eq, if_true, if_false, fall_through);
   4602   } else if (check->Equals(isolate()->heap()->boolean_string())) {
   4603     __ CompareRoot(r0, Heap::kTrueValueRootIndex);
   4604     __ b(eq, if_true);
   4605     __ CompareRoot(r0, Heap::kFalseValueRootIndex);
   4606     Split(eq, if_true, if_false, fall_through);
   4607   } else if (FLAG_harmony_typeof &&
   4608              check->Equals(isolate()->heap()->null_string())) {
   4609     __ CompareRoot(r0, Heap::kNullValueRootIndex);
   4610     Split(eq, if_true, if_false, fall_through);
   4611   } else if (check->Equals(isolate()->heap()->undefined_string())) {
   4612     __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
   4613     __ b(eq, if_true);
   4614     __ JumpIfSmi(r0, if_false);
   4615     // Check for undetectable objects => true.
   4616     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
   4617     __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
   4618     __ tst(r1, Operand(1 << Map::kIsUndetectable));
   4619     Split(ne, if_true, if_false, fall_through);
   4620 
   4621   } else if (check->Equals(isolate()->heap()->function_string())) {
   4622     __ JumpIfSmi(r0, if_false);
   4623     STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
   4624     __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
   4625     __ b(eq, if_true);
   4626     __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
   4627     Split(eq, if_true, if_false, fall_through);
   4628   } else if (check->Equals(isolate()->heap()->object_string())) {
   4629     __ JumpIfSmi(r0, if_false);
   4630     if (!FLAG_harmony_typeof) {
   4631       __ CompareRoot(r0, Heap::kNullValueRootIndex);
   4632       __ b(eq, if_true);
   4633     }
   4634     // Check for JS objects => true.
   4635     __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
   4636     __ b(lt, if_false);
   4637     __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
   4638     __ b(gt, if_false);
   4639     // Check for undetectable objects => false.
   4640     __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
   4641     __ tst(r1, Operand(1 << Map::kIsUndetectable));
   4642     Split(eq, if_true, if_false, fall_through);
   4643   } else {
   4644     if (if_false != fall_through) __ jmp(if_false);
   4645   }
   4646   context()->Plug(if_true, if_false);
   4647 }
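// The chain above is an open-coded typeof dispatch: each branch specializes
// the test for one literal result string, and an unrecognized string needs no
// code at all beyond the jump to if_false. The undetectable bit is the one
// subtlety: an undetectable object answers "undefined" and fails the
// "string" and "object" checks even though it is a heap object with a map.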
   4648 
   4649 
   4650 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
   4651   Comment cmnt(masm_, "[ CompareOperation");
   4652   SetSourcePosition(expr->position());
   4653 
   4654   // First we try a fast inlined version of the compare when one of
   4655   // the operands is a literal.
   4656   if (TryLiteralCompare(expr)) return;
   4657 
   4658   // Always perform the comparison for its control flow.  Pack the result
   4659   // into the expression's context after the comparison is performed.
   4660   Label materialize_true, materialize_false;
   4661   Label* if_true = NULL;
   4662   Label* if_false = NULL;
   4663   Label* fall_through = NULL;
   4664   context()->PrepareTest(&materialize_true, &materialize_false,
   4665                          &if_true, &if_false, &fall_through);
   4666 
   4667   Token::Value op = expr->op();
   4668   VisitForStackValue(expr->left());
   4669   switch (op) {
   4670     case Token::IN:
   4671       VisitForStackValue(expr->right());
   4672       __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
   4673       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
   4674       __ LoadRoot(ip, Heap::kTrueValueRootIndex);
   4675       __ cmp(r0, ip);
   4676       Split(eq, if_true, if_false, fall_through);
   4677       break;
   4678 
   4679     case Token::INSTANCEOF: {
   4680       VisitForStackValue(expr->right());
   4681       InstanceofStub stub(InstanceofStub::kNoFlags);
   4682       __ CallStub(&stub);
   4683       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   4684       // The stub returns 0 for true.
   4685       __ tst(r0, r0);
   4686       Split(eq, if_true, if_false, fall_through);
   4687       break;
   4688     }
   4689 
   4690     default: {
   4691       VisitForAccumulatorValue(expr->right());
   4692       Condition cond = CompareIC::ComputeCondition(op);
   4693       __ pop(r1);
   4694 
   4695       bool inline_smi_code = ShouldInlineSmiCase(op);
   4696       JumpPatchSite patch_site(masm_);
   4697       if (inline_smi_code) {
   4698         Label slow_case;
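                // Smis carry a zero tag bit, so the OR of two values has its
                // low bit clear only if both are smis; one orr plus one tag
                // test covers both operands at once.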
   4699         __ orr(r2, r0, Operand(r1));
   4700         patch_site.EmitJumpIfNotSmi(r2, &slow_case);
   4701         __ cmp(r1, r0);
   4702         Split(cond, if_true, if_false, NULL);
   4703         __ bind(&slow_case);
   4704       }
   4705 
   4706       // Record position and call the compare IC.
   4707       SetSourcePosition(expr->position());
   4708       Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
   4709       CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
   4710       patch_site.EmitPatchInfo();
   4711       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   4712       __ cmp(r0, Operand::Zero());
   4713       Split(cond, if_true, if_false, fall_through);
   4714     }
   4715   }
   4716 
   4717   // Convert the result of the comparison into one expected for this
   4718   // expression's context.
   4719   context()->Plug(if_true, if_false);
   4720 }
   4721 
   4722 
   4723 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
   4724                                               Expression* sub_expr,
   4725                                               NilValue nil) {
   4726   Label materialize_true, materialize_false;
   4727   Label* if_true = NULL;
   4728   Label* if_false = NULL;
   4729   Label* fall_through = NULL;
   4730   context()->PrepareTest(&materialize_true, &materialize_false,
   4731                          &if_true, &if_false, &fall_through);
   4732 
   4733   VisitForAccumulatorValue(sub_expr);
   4734   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   4735   if (expr->op() == Token::EQ_STRICT) {
   4736     Heap::RootListIndex nil_value = nil == kNullValue ?
   4737         Heap::kNullValueRootIndex :
   4738         Heap::kUndefinedValueRootIndex;
   4739     __ LoadRoot(r1, nil_value);
   4740     __ cmp(r0, r1);
   4741     Split(eq, if_true, if_false, fall_through);
   4742   } else {
   4743     Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
   4744     CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
   4745     __ cmp(r0, Operand(0));
   4746     Split(ne, if_true, if_false, fall_through);
   4747   }
   4748   context()->Plug(if_true, if_false);
   4749 }
   4750 
   4751 
   4752 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
   4753   __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   4754   context()->Plug(r0);
   4755 }
   4756 
   4757 
   4758 Register FullCodeGenerator::result_register() {
   4759   return r0;
   4760 }
   4761 
   4762 
   4763 Register FullCodeGenerator::context_register() {
   4764   return cp;
   4765 }
   4766 
   4767 
   4768 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
   4769   ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
   4770   __ str(value, MemOperand(fp, frame_offset));
   4771 }
   4772 
   4773 
   4774 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
   4775   __ ldr(dst, ContextOperand(cp, context_index));
   4776 }
   4777 
   4778 
   4779 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
   4780   Scope* declaration_scope = scope()->DeclarationScope();
   4781   if (declaration_scope->is_global_scope() ||
   4782       declaration_scope->is_module_scope()) {
   4783     // Contexts nested in the native context have a canonical empty function
   4784     // as their closure, not the anonymous closure containing the global
   4785     // code.  Pass a smi sentinel and let the runtime look up the empty
   4786     // function.
   4787     __ mov(ip, Operand(Smi::FromInt(0)));
   4788   } else if (declaration_scope->is_eval_scope()) {
   4789     // Contexts created by a call to eval have the same closure as the
   4790     // context calling eval, not the anonymous closure containing the eval
   4791     // code.  Fetch it from the context.
   4792     __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
   4793   } else {
   4794     ASSERT(declaration_scope->is_function_scope());
   4795     __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   4796   }
   4797   __ push(ip);
   4798 }
   4799 
   4800 
   4801 // ----------------------------------------------------------------------------
   4802 // Non-local control flow support.
   4803 
   4804 void FullCodeGenerator::EnterFinallyBlock() {
   4805   ASSERT(!result_register().is(r1));
   4806   // Store result register while executing finally block.
   4807   __ push(result_register());
   4808   // Cook the return address in lr for the stack (a smi-encoded Code* delta).
   4809   __ sub(r1, lr, Operand(masm_->CodeObject()));
   4810   __ SmiTag(r1);
   4811 
   4812   // Store the cooked return address while executing the finally block.
   4813   __ push(r1);
   4814 
   4815   // Store pending message while executing finally block.
   4816   ExternalReference pending_message_obj =
   4817       ExternalReference::address_of_pending_message_obj(isolate());
   4818   __ mov(ip, Operand(pending_message_obj));
   4819   __ ldr(r1, MemOperand(ip));
   4820   __ push(r1);
   4821 
   4822   ExternalReference has_pending_message =
   4823       ExternalReference::address_of_has_pending_message(isolate());
   4824   __ mov(ip, Operand(has_pending_message));
   4825   __ ldr(r1, MemOperand(ip));
   4826   __ SmiTag(r1);
   4827   __ push(r1);
   4828 
   4829   ExternalReference pending_message_script =
   4830       ExternalReference::address_of_pending_message_script(isolate());
   4831   __ mov(ip, Operand(pending_message_script));
   4832   __ ldr(r1, MemOperand(ip));
   4833   __ push(r1);
   4834 }
   4835 
   4836 
   4837 void FullCodeGenerator::ExitFinallyBlock() {
   4838   ASSERT(!result_register().is(r1));
   4839   // Restore pending message from stack.
   4840   __ pop(r1);
   4841   ExternalReference pending_message_script =
   4842       ExternalReference::address_of_pending_message_script(isolate());
   4843   __ mov(ip, Operand(pending_message_script));
   4844   __ str(r1, MemOperand(ip));
   4845 
   4846   __ pop(r1);
   4847   __ SmiUntag(r1);
   4848   ExternalReference has_pending_message =
   4849       ExternalReference::address_of_has_pending_message(isolate());
   4850   __ mov(ip, Operand(has_pending_message));
   4851   __ str(r1, MemOperand(ip));
   4852 
   4853   __ pop(r1);
   4854   ExternalReference pending_message_obj =
   4855       ExternalReference::address_of_pending_message_obj(isolate());
   4856   __ mov(ip, Operand(pending_message_obj));
   4857   __ str(r1, MemOperand(ip));
   4858 
   4859   // Pop the cooked return address (pushed by EnterFinallyBlock).
   4860   __ pop(r1);
   4861 
   4862   // Restore the result register, then uncook the return address and return.
   4863   __ pop(result_register());
   4864   __ SmiUntag(r1);
   4865   __ add(pc, r1, Operand(masm_->CodeObject()));
   4866 }
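// Cooking turns the raw return address into a smi-encoded offset from the
// start of the code object, so the word on the stack is never a raw code
// pointer the GC could misread, and it stays valid if the code object moves.
// The round trip, sketched for the 32-bit smi encoding used here (tagging is
// a left shift by one):
//
//   static uint32_t Cook(uint32_t return_address, uint32_t code_start) {
//     return (return_address - code_start) << 1;  // delta, smi-tagged
//   }
//
//   static uint32_t Uncook(uint32_t cooked, uint32_t code_start) {
//     return (cooked >> 1) + code_start;          // back to a real address
//   }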
   4867 
   4868 
   4869 #undef __
   4870 
   4871 #define __ ACCESS_MASM(masm())
   4872 
   4873 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
   4874     int* stack_depth,
   4875     int* context_length) {
   4876   // The macros used here must preserve the result register.
   4877 
   4878   // Because the handler block contains the context of the finally
   4879   // code, we can restore it directly from there for the finally code
   4880   // rather than iteratively unwinding contexts via their previous
   4881   // links.
   4882   __ Drop(*stack_depth);  // Down to the handler block.
   4883   if (*context_length > 0) {
   4884     // Restore the context to its dedicated register and the stack.
   4885     __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
   4886     __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   4887   }
   4888   __ PopTryHandler();
   4889   __ bl(finally_entry_);
   4890 
   4891   *stack_depth = 0;
   4892   *context_length = 0;
   4893   return previous_;
   4894 }
   4895 
   4896 
   4897 #undef __
   4898 
   4899 } }  // namespace v8::internal
   4900 
   4901 #endif  // V8_TARGET_ARCH_ARM
   4902