// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

#include "arm/code-stubs-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// A patch site is a location in the code that can be patched. This class
// has methods to emit the patchable code and a method, EmitPatchInfo, to
// record a marker back to the patchable code. The marker is a
// cmp rx, #yyy instruction, and x * 0x00000fff + yyy (the raw 12-bit
// immediate value) is the delta from the pc to the first instruction of
// the patchable code.
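// For example (illustrative arithmetic only), with kOff12Mask == 0xfff
// (4095), a delta of 4660 would be recorded as cmp r1, #565, since
// 4660 / 4095 == 1 and 4660 % 4095 == 565.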
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    // Don't use b(al, ...) as that might emit the constant pool right after
    // the branch. After patching, when the branch is no longer unconditional,
    // execution could continue into the constant pool.
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
    Register reg;
    reg.set_code(delta_to_patch_site / kOff12Mask);
    __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }

  bool is_bound() const { return patch_site_.is_bound(); }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate(CompilationInfo* info) {
  ASSERT(info_ == NULL);
  info_ = info;
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  int locals_count = scope()->num_stack_slots();

  __ Push(lr, fp, cp, r1);
  if (locals_count > 0) {
    // Load undefined value here, so the value is ready for the loop
    // below.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  }
  // Adjust fp to point to caller's fp.
  __ add(fp, sp, Operand(2 * kPointerSize));
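  // A sketch of the frame at this point (kPointerSize == 4 on ARM), lowest
  // address first: sp+0 holds r1 (the function), sp+4 holds cp (the
  // context), sp+8 holds the caller's fp, and sp+12 holds lr, so fp now
  // points at the saved-fp slot of the standard frame (see frames-arm.h).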

  { Comment cmnt(masm_, "[ Allocate locals");
    for (int i = 0; i < locals_count; i++) {
      __ push(ip);
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate local context");
    // Argument to NewContext is the function, which is in r1.
    __ push(r1);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewContext, 1);
    }
    function_in_register = false;
    // Context is returned in both r0 and cp.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in cp.
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Slot* slot = scope()->parameter(i)->AsSlot();
      if (slot != NULL && slot->type() == Slot::CONTEXT) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        __ mov(r1, Operand(Context::SlotOffset(slot->index())));
        __ str(r0, MemOperand(cp, r1));
        // Update the write barrier. This clobbers all involved
        // registers, so we have to use two more registers to avoid
        // clobbering cp.
        __ mov(r2, Operand(cp));
        __ RecordWrite(r2, Operand(r1), r3, r0);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(r3, r1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int offset = scope()->num_parameters() * kPointerSize;
    __ add(r2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r1, Operand(Smi::FromInt(scope()->num_parameters())));
    __ Push(r3, r2, r1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite the receiver and parameter count if the
    // previous stack frame was an arguments adaptor frame.
    ArgumentsAccessStub stub(
        is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
                         : ArgumentsAccessStub::NEW_NON_STRICT);
    __ CallStub(&stub);

    Variable* arguments_shadow = scope()->arguments_shadow();
    if (arguments_shadow != NULL) {
      // Duplicate the value; move-to-slot operation might clobber registers.
      __ mov(r3, r0);
      Move(arguments_shadow->AsSlot(), r3, r1, r2);
    }
    Move(arguments->AsSlot(), r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        EmitDeclaration(scope()->function(), Variable::CONST, NULL);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, &ok);
      StackCheckStub stub;
      __ CallStub(&stub);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the stack check table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
  Comment cmnt(masm_, "[ Stack check");
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  StackCheckStub stub;
  __ CallStub(&stub);
  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordStackCheck(stmt->OsrEntryId());

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      // Use masm_-> here instead of the __ macro to keep the code coverage
      // tool from instrumenting this sequence, since we rely on its exact
      // code size below.
      int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
      masm_->add(sp, sp, Operand(sp_delta));
      masm_->Jump(lr);
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}
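
// Note: the return sequence above must span at least
// Assembler::kJSReturnSequenceInstructions, as the DEBUG check verifies;
// the debugger relies on this fixed minimum size when patching the
// function's exit point.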


void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
  codegen()->Move(result_register(), slot);
}


void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
  codegen()->Move(result_register(), slot);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
  // For simplicity we always test the accumulator register.
  codegen()->Move(result_register(), slot);
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
  codegen()->DoTest(true_label_, false_label_, fall_through_);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(true_label_, false_label_, fall_through_);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(true_label_, false_label_, fall_through_);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
  codegen()->DoTest(true_label_, false_label_, fall_through_);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ push(ip);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ push(ip);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    // Emit the inlined tests assumed by the stub.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(result_register(), ip);
    __ b(eq, if_false);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(result_register(), ip);
    __ b(eq, if_true);
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(result_register(), ip);
    __ b(eq, if_false);
    STATIC_ASSERT(kSmiTag == 0);
    __ tst(result_register(), result_register());
    __ b(eq, if_false);
    __ JumpIfSmi(result_register(), if_true);

    // Call the ToBoolean stub for all other cases.
    ToBooleanStub stub(result_register());
    __ CallStub(&stub);
    __ tst(result_register(), result_register());
  } else {
    // Call the runtime to find the boolean value of the source and then
    // translate it into control flow to the pair of labels.
    __ push(result_register());
    __ CallRuntime(Runtime::kToBool, 1);
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(r0, ip);
  }

  // The stub returns nonzero for true.
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}
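
// For example, Split(eq, &if_true, &if_false, &if_false) emits just
// b(eq, &if_true) and falls through to if_false, while a fall-through
// label distinct from both targets costs an extra unconditional branch.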


MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
  switch (slot->type()) {
    case Slot::PARAMETER:
    case Slot::LOCAL:
      return MemOperand(fp, SlotOffset(slot));
    case Slot::CONTEXT: {
      int context_chain_length =
          scope()->ContextChainLength(slot->var()->scope());
      __ LoadContext(scratch, context_chain_length);
      return ContextOperand(scratch, slot->index());
    }
    case Slot::LOOKUP:
      UNREACHABLE();
  }
  UNREACHABLE();
  return MemOperand(r0, 0);
}
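
// For example: PARAMETER and LOCAL slots resolve to a frame-relative
// operand at fp + SlotOffset(slot); a CONTEXT slot first loads the right
// context in the chain into |scratch| and indexes into it. LOOKUP slots
// are resolved by runtime calls and never reach this function.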


void FullCodeGenerator::Move(Register destination, Slot* source) {
  // Use destination as scratch.
  MemOperand slot_operand = EmitSlotSearch(source, destination);
  __ ldr(destination, slot_operand);
}


void FullCodeGenerator::Move(Slot* dst,
                             Register src,
                             Register scratch1,
                             Register scratch2) {
  ASSERT(dst->type() != Slot::LOOKUP);  // Not yet implemented.
  ASSERT(!scratch1.is(src) && !scratch2.is(src));
  MemOperand location = EmitSlotSearch(dst, scratch1);
  __ str(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (dst->type() == Slot::CONTEXT) {
    __ RecordWrite(scratch1,
                   Operand(Context::SlotOffset(dst->index())),
                   scratch2,
                   src);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);

  ForwardBailoutStack* current = forward_bailout_stack_;
  while (current != NULL) {
    PrepareForBailout(current->expr(), state);
    current = current->parent();
  }

  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDeclaration(Variable* variable,
                                        Variable::Mode mode,
                                        FunctionLiteral* function) {
  Comment cmnt(masm_, "[ Declaration");
  ASSERT(variable != NULL);  // Must have been resolved.
  Slot* slot = variable->AsSlot();
  Property* prop = variable->AsProperty();

  if (slot != NULL) {
    switch (slot->type()) {
      case Slot::PARAMETER:
      case Slot::LOCAL:
        if (mode == Variable::CONST) {
          __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
          __ str(ip, MemOperand(fp, SlotOffset(slot)));
        } else if (function != NULL) {
          VisitForAccumulatorValue(function);
          __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
        }
        break;

      case Slot::CONTEXT:
        // We bypass the general EmitSlotSearch because we know more about
        // this specific context.

        // The variable in the decl always resides in the current function
        // context.
        ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
        if (FLAG_debug_code) {
          // Check that we're not inside a 'with'.
          __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
          __ cmp(r1, cp);
          __ Check(eq, "Unexpected declaration in current context.");
        }
        if (mode == Variable::CONST) {
          __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
          __ str(ip, ContextOperand(cp, slot->index()));
          // No write barrier since the_hole_value is in old space.
        } else if (function != NULL) {
          VisitForAccumulatorValue(function);
          __ str(result_register(), ContextOperand(cp, slot->index()));
          int offset = Context::SlotOffset(slot->index());
          // We know that we have written a function, which is not a smi.
          __ mov(r1, Operand(cp));
          __ RecordWrite(r1, Operand(offset), r2, result_register());
        }
        break;

      case Slot::LOOKUP: {
        __ mov(r2, Operand(variable->name()));
        // Declaration nodes are always introduced in one of two modes.
        ASSERT(mode == Variable::VAR ||
               mode == Variable::CONST);
        PropertyAttributes attr =
            (mode == Variable::VAR) ? NONE : READ_ONLY;
        __ mov(r1, Operand(Smi::FromInt(attr)));
        // Push initial value, if any.
        // Note: For variables we must not push an initial value (such as
        // 'undefined') because we may have a (legal) redeclaration and we
        // must not destroy the current value.
        if (mode == Variable::CONST) {
          __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
          __ Push(cp, r2, r1, r0);
        } else if (function != NULL) {
          __ Push(cp, r2, r1);
          // Push initial value for function declaration.
          VisitForStackValue(function);
        } else {
          __ mov(r0, Operand(Smi::FromInt(0)));  // No initial value!
          __ Push(cp, r2, r1, r0);
        }
        __ CallRuntime(Runtime::kDeclareContextSlot, 4);
        break;
      }
    }

  } else if (prop != NULL) {
    if (function != NULL || mode == Variable::CONST) {
      // We are declaring a function or constant that rewrites to a
      // property.  Use (keyed) IC to set the initial value.  We
      // cannot visit the rewrite because it's shared and we risk
      // recording duplicate AST IDs for bailouts from optimized code.
      ASSERT(prop->obj()->AsVariableProxy() != NULL);
      { AccumulatorValueContext for_object(this);
        EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
      }
      if (function != NULL) {
        __ push(r0);
        VisitForAccumulatorValue(function);
        __ pop(r2);
      } else {
        __ mov(r2, r0);
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
      }
      ASSERT(prop->key()->AsLiteral() != NULL &&
             prop->key()->AsLiteral()->handle()->IsSmi());
      __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));

      Handle<Code> ic = is_strict_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
          : isolate()->builtins()->KeyedStoreIC_Initialize();
      EmitCallIC(ic, RelocInfo::CODE_TARGET);
      // Value in r0 is ignored (declarations are statements).
    }
  }
}


void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
  EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r2, Operand(pairs));
  __ mov(r1, Operand(Smi::FromInt(is_eval() ? 1 : 0)));
  __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
  __ Push(cp, r2, r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals, 4);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
    EmitCallIC(ic, &patch_site);
    __ cmp(r0, Operand(0));
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_target());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_target());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. Both SpiderMonkey and JSC
  // ignore null and undefined in contrast to the specification; see
  // ECMA-262 section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
  __ b(hs, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
  __ bind(&done_convert);
  __ push(r0);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  Label next, call_runtime;
  // Preload a couple of values used in the loop.
  Register empty_fixed_array_value = r6;
  __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Register empty_descriptor_array_value = r7;
  __ LoadRoot(empty_descriptor_array_value,
              Heap::kEmptyDescriptorArrayRootIndex);
  __ mov(r1, r0);
  __ bind(&next);

  // Check that there are no elements.  Register r1 contains the
  // current JS object we've reached through the prototype chain.
  __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
  __ cmp(r2, empty_fixed_array_value);
  __ b(ne, &call_runtime);

  // Check that instance descriptors are not empty so that we can
  // check for an enum cache.  Leave the map in r2 for the subsequent
  // prototype load.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldr(r3, FieldMemOperand(r2, Map::kInstanceDescriptorsOffset));
  __ cmp(r3, empty_descriptor_array_value);
  __ b(eq, &call_runtime);

  // Check that there is an enum cache in the non-empty instance
  // descriptors (r3).  This is the case if the next enumeration
  // index field does not contain a smi.
  __ ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumerationIndexOffset));
  __ JumpIfSmi(r3, &call_runtime);

  // For all objects but the receiver, check that the cache is empty.
  Label check_prototype;
  __ cmp(r1, r0);
  __ b(eq, &check_prototype);
  __ ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumCacheBridgeCacheOffset));
  __ cmp(r3, empty_fixed_array_value);
  __ b(ne, &call_runtime);

  // Load the prototype from the map and loop if non-null.
  __ bind(&check_prototype);
  __ ldr(r1, FieldMemOperand(r2, Map::kPrototypeOffset));
  __ cmp(r1, null_value);
  __ b(ne, &next);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ mov(r2, r0);
  __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  __ bind(&use_cache);
  __ ldr(r1, FieldMemOperand(r0, Map::kInstanceDescriptorsOffset));
  __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
  __ ldr(r2, FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ ldr(r1, FieldMemOperand(r2, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  // We got a fixed array in register r0. Iterate through that.
  __ bind(&fixed_array);
  __ mov(r1, Operand(Smi::FromInt(0)));  // Map (0) - force slow check.
  __ Push(r1, r0);
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.
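
  // Either way the stack now holds five values for the loop below, from
  // sp upwards: the current index (smi 0), the length (smi), the enum
  // cache or fixed array of names, the map (or smi 0 to force the slow
  // path), and the enumerable object itself.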

  // Generate code for doing the condition check.
  __ bind(&loop);
  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_target());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));

  // Get the expected map from the stack or a zero map in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS);
  __ mov(r3, Operand(r0), SetCC);
  __ b(eq, loop_statement.continue_target());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->AssignmentId());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_target());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitStackCheck(stmt);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_target());
  __ Drop(5);

  // Exit and decrement the loop depth.
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
    __ mov(r0, Operand(info));
    __ push(r0);
    __ CallStub(&stub);
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, r0, r1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr->var());
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
    Slot* slot,
    Label* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      __ ldr(next, ContextOperand(context, Context::CLOSURE_INDEX));
      __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
   1141   // destroy the cp register).
   1142   return ContextOperand(context, slot->index());
   1143 }
   1144 
   1145 
   1146 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
   1147     Slot* slot,
   1148     TypeofState typeof_state,
   1149     Label* slow,
   1150     Label* done) {
   1151   // Generate fast-case code for variables that might be shadowed by
   1152   // eval-introduced variables.  Eval is used a lot without
   1153   // introducing variables.  In those cases, we do not want to
   1154   // perform a runtime call for all variables in the scope
   1155   // containing the eval.
   1156   if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
   1157     EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
   1158     __ jmp(done);
   1159   } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
   1160     Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
   1161     Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
   1162     if (potential_slot != NULL) {
   1163       // Generate fast case for locals that rewrite to slots.
   1164       __ ldr(r0, ContextSlotOperandCheckExtensions(potential_slot, slow));
   1165       if (potential_slot->var()->mode() == Variable::CONST) {
   1166         __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
   1167         __ cmp(r0, ip);
   1168         __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
   1169       }
   1170       __ jmp(done);
   1171     } else if (rewrite != NULL) {
   1172       // Generate fast case for calls of an argument function.
   1173       Property* property = rewrite->AsProperty();
   1174       if (property != NULL) {
   1175         VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
   1176         Literal* key_literal = property->key()->AsLiteral();
   1177         if (obj_proxy != NULL &&
   1178             key_literal != NULL &&
   1179             obj_proxy->IsArguments() &&
   1180             key_literal->handle()->IsSmi()) {
   1181           // Load arguments object if there are no eval-introduced
   1182           // variables. Then load the argument from the arguments
   1183           // object using keyed load.
   1184           __ ldr(r1,
   1185                  ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
   1186                                                    slow));
   1187           __ mov(r0, Operand(key_literal->handle()));
   1188           Handle<Code> ic =
   1189               isolate()->builtins()->KeyedLoadIC_Initialize();
   1190           EmitCallIC(ic, RelocInfo::CODE_TARGET);
   1191           __ jmp(done);
   1192         }
   1193       }
   1194     }
   1195   }
   1196 }
   1197 
   1198 
   1199 void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
   1200     Slot* slot,
   1201     TypeofState typeof_state,
   1202     Label* slow) {
   1203   Register current = cp;
   1204   Register next = r1;
   1205   Register temp = r2;
   1206 
   1207   Scope* s = scope();
   1208   while (s != NULL) {
   1209     if (s->num_heap_slots() > 0) {
   1210       if (s->calls_eval()) {
   1211         // Check that extension is NULL.
   1212         __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
   1213         __ tst(temp, temp);
   1214         __ b(ne, slow);
   1215       }
   1216       // Load next context in chain.
   1217       __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX));
   1218       __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
   1219       // Walk the rest of the chain without clobbering cp.
   1220       current = next;
   1221     }
   1222     // If no outer scope calls eval, we do not need to check more
   1223     // context extensions.
   1224     if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
   1225     s = s->outer_scope();
   1226   }
   1227 
   1228   if (s->is_eval_scope()) {
   1229     Label loop, fast;
   1230     if (!current.is(next)) {
   1231       __ Move(next, current);
   1232     }
   1233     __ bind(&loop);
   1234     // Terminate at global context.
   1235     __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
   1236     __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
   1237     __ cmp(temp, ip);
   1238     __ b(eq, &fast);
   1239     // Check that extension is NULL.
   1240     __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
   1241     __ tst(temp, temp);
   1242     __ b(ne, slow);
   1243     // Load next context in chain.
   1244     __ ldr(next, ContextOperand(next, Context::CLOSURE_INDEX));
   1245     __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
   1246     __ b(&loop);
   1247     __ bind(&fast);
   1248   }
   1249 
   1250   __ ldr(r0, GlobalObjectOperand());
   1251   __ mov(r2, Operand(slot->var()->name()));
   1252   RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
   1253       ? RelocInfo::CODE_TARGET
   1254       : RelocInfo::CODE_TARGET_CONTEXT;
   1255   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   1256   EmitCallIC(ic, mode);
   1257 }
   1258 
   1259 
   1260 void FullCodeGenerator::EmitVariableLoad(Variable* var) {
   1261   // Four cases: non-this global variables, lookup slots, all other
   1262   // types of slots, and parameters that rewrite to explicit property
   1263   // accesses on the arguments object.
   1264   Slot* slot = var->AsSlot();
   1265   Property* property = var->AsProperty();
   1266 
   1267   if (var->is_global() && !var->is_this()) {
   1268     Comment cmnt(masm_, "Global variable");
   1269     // Use inline caching. Variable name is passed in r2 and the global
   1270     // object (receiver) in r0.
   1271     __ ldr(r0, GlobalObjectOperand());
   1272     __ mov(r2, Operand(var->name()));
   1273     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   1274     EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
   1275     context()->Plug(r0);
   1276 
   1277   } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
   1278     Label done, slow;
   1279 
   1280     // Generate code for loading from variables potentially shadowed
   1281     // by eval-introduced variables.
   1282     EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
   1283 
   1284     __ bind(&slow);
   1285     Comment cmnt(masm_, "Lookup slot");
   1286     __ mov(r1, Operand(var->name()));
   1287     __ Push(cp, r1);  // Context and name.
   1288     __ CallRuntime(Runtime::kLoadContextSlot, 2);
   1289     __ bind(&done);
   1290 
   1291     context()->Plug(r0);
   1292 
   1293   } else if (slot != NULL) {
   1294     Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
   1295                             ? "Context slot"
   1296                             : "Stack slot");
   1297     if (var->mode() == Variable::CONST) {
   1298       // Constants may be the hole value if they have not been initialized.
   1299       // Unhole them.
   1300       MemOperand slot_operand = EmitSlotSearch(slot, r0);
   1301       __ ldr(r0, slot_operand);
   1302       __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
   1303       __ cmp(r0, ip);
   1304       __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
   1305       context()->Plug(r0);
   1306     } else {
   1307       context()->Plug(slot);
   1308     }
   1309   } else {
   1310     Comment cmnt(masm_, "Rewritten parameter");
   1311     ASSERT_NOT_NULL(property);
   1312     // Rewritten parameter accesses are of the form "slot[literal]".
   1313 
   1314     // Assert that the object is in a slot.
   1315     Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
   1316     ASSERT_NOT_NULL(object_var);
   1317     Slot* object_slot = object_var->AsSlot();
   1318     ASSERT_NOT_NULL(object_slot);
   1319 
   1320     // Load the object.
   1321     Move(r1, object_slot);
   1322 
   1323     // Assert that the key is a smi.
   1324     Literal* key_literal = property->key()->AsLiteral();
   1325     ASSERT_NOT_NULL(key_literal);
   1326     ASSERT(key_literal->handle()->IsSmi());
   1327 
   1328     // Load the key.
   1329     __ mov(r0, Operand(key_literal->handle()));
   1330 
   1331     // Call keyed load IC. It has arguments key and receiver in r0 and r1.
   1332     Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
   1333     EmitCallIC(ic, RelocInfo::CODE_TARGET);
   1334     context()->Plug(r0);
   1335   }
   1336 }
   1337 
   1338 
   1339 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
   1340   Comment cmnt(masm_, "[ RegExpLiteral");
   1341   Label materialized;
   1342   // Registers will be used as follows:
   1343   // r5 = materialized value (RegExp literal)
   1344   // r4 = JS function, literals array
   1345   // r3 = literal index
   1346   // r2 = RegExp pattern
   1347   // r1 = RegExp flags
   1348   // r0 = RegExp literal clone
   1349   __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1350   __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
   1351   int literal_offset =
   1352       FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
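          // Load the cached literal from the function's literals array;
          // FieldMemOperand compensates for the heap-object tag.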
   1353   __ ldr(r5, FieldMemOperand(r4, literal_offset));
   1354   __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
   1355   __ cmp(r5, ip);
   1356   __ b(ne, &materialized);
   1357 
   1358   // Create regexp literal using runtime function.
   1359   // Result will be in r0.
   1360   __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
   1361   __ mov(r2, Operand(expr->pattern()));
   1362   __ mov(r1, Operand(expr->flags()));
   1363   __ Push(r4, r3, r2, r1);
   1364   __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
   1365   __ mov(r5, r0);
   1366 
   1367   __ bind(&materialized);
   1368   int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
   1369   Label allocated, runtime_allocate;
   1370   __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
   1371   __ jmp(&allocated);
   1372 
   1373   __ bind(&runtime_allocate);
   1374   __ push(r5);
   1375   __ mov(r0, Operand(Smi::FromInt(size)));
   1376   __ push(r0);
   1377   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
   1378   __ pop(r5);
   1379 
   1380   __ bind(&allocated);
   1381   // After this, registers are used as follows:
   1382   // r0: Newly allocated regexp.
   1383   // r5: Materialized regexp.
   1384   // r2: temp.
   1385   __ CopyFields(r0, r5, r2.bit(), size / kPointerSize);
   1386   context()->Plug(r0);
   1387 }
   1388 
   1389 
   1390 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   1391   Comment cmnt(masm_, "[ ObjectLiteral");
   1392   __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1393   __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
   1394   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
   1395   __ mov(r1, Operand(expr->constant_properties()));
   1396   int flags = expr->fast_elements()
   1397       ? ObjectLiteral::kFastElements
   1398       : ObjectLiteral::kNoFlags;
   1399   flags |= expr->has_function()
   1400       ? ObjectLiteral::kHasFunction
   1401       : ObjectLiteral::kNoFlags;
   1402   __ mov(r0, Operand(Smi::FromInt(flags)));
   1403   __ Push(r3, r2, r1, r0);
   1404   if (expr->depth() > 1) {
   1405     __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
   1406   } else {
   1407     __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
   1408   }
   1409 
   1410   // If result_saved is true the result is on top of the stack.  If
   1411   // result_saved is false the result is in r0.
   1412   bool result_saved = false;
   1413 
   1414   // Mark all computed expressions that are bound to a key that
   1415   // is shadowed by a later occurrence of the same key. For the
   1416   // marked expressions, no store code is emitted.
   1417   expr->CalculateEmitStore();
   1418 
   1419   for (int i = 0; i < expr->properties()->length(); i++) {
   1420     ObjectLiteral::Property* property = expr->properties()->at(i);
   1421     if (property->IsCompileTimeValue()) continue;
   1422 
   1423     Literal* key = property->key();
   1424     Expression* value = property->value();
   1425     if (!result_saved) {
   1426       __ push(r0);  // Save result on stack
   1427       result_saved = true;
   1428     }
   1429     switch (property->kind()) {
   1430       case ObjectLiteral::Property::CONSTANT:
   1431         UNREACHABLE();
   1432       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
   1433         ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
   1434         // Fall through.
   1435       case ObjectLiteral::Property::COMPUTED:
   1436         if (key->handle()->IsSymbol()) {
   1437           if (property->emit_store()) {
   1438             VisitForAccumulatorValue(value);
   1439             __ mov(r2, Operand(key->handle()));
   1440             __ ldr(r1, MemOperand(sp));
   1441             Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize();
   1442             EmitCallIC(ic, RelocInfo::CODE_TARGET);
   1443             PrepareForBailoutForId(key->id(), NO_REGISTERS);
   1444           } else {
   1445             VisitForEffect(value);
   1446           }
   1447           break;
   1448         }
   1449         // Fall through.
   1450       case ObjectLiteral::Property::PROTOTYPE:
   1451         // Duplicate receiver on stack.
   1452         __ ldr(r0, MemOperand(sp));
   1453         __ push(r0);
   1454         VisitForStackValue(key);
   1455         VisitForStackValue(value);
   1456         if (property->emit_store()) {
   1457           __ mov(r0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
   1458           __ push(r0);
   1459           __ CallRuntime(Runtime::kSetProperty, 4);
   1460         } else {
   1461           __ Drop(3);
   1462         }
   1463         break;
   1464       case ObjectLiteral::Property::GETTER:
   1465       case ObjectLiteral::Property::SETTER:
   1466         // Duplicate receiver on stack.
   1467         __ ldr(r0, MemOperand(sp));
   1468         __ push(r0);
   1469         VisitForStackValue(key);
   1470         __ mov(r1, Operand(property->kind() == ObjectLiteral::Property::SETTER ?
   1471                            Smi::FromInt(1) :
   1472                            Smi::FromInt(0)));
   1473         __ push(r1);
   1474         VisitForStackValue(value);
   1475         __ CallRuntime(Runtime::kDefineAccessor, 4);
   1476         break;
   1477     }
   1478   }
   1479 
   1480   if (expr->has_function()) {
   1481     ASSERT(result_saved);
   1482     __ ldr(r0, MemOperand(sp));
   1483     __ push(r0);
   1484     __ CallRuntime(Runtime::kToFastProperties, 1);
   1485   }
   1486 
   1487   if (result_saved) {
   1488     context()->PlugTOS();
   1489   } else {
   1490     context()->Plug(r0);
   1491   }
   1492 }
   1493 
   1494 
   1495 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   1496   Comment cmnt(masm_, "[ ArrayLiteral");
   1497 
   1498   ZoneList<Expression*>* subexprs = expr->values();
   1499   int length = subexprs->length();
   1500 
   1501   __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1502   __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
   1503   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
   1504   __ mov(r1, Operand(expr->constant_elements()));
   1505   __ Push(r3, r2, r1);
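          // Elements backed by the copy-on-write array map can be cloned
          // cheaply: the stub shares the elements until a store hits them.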
   1506   if (expr->constant_elements()->map() ==
   1507       isolate()->heap()->fixed_cow_array_map()) {
   1508     FastCloneShallowArrayStub stub(
   1509         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
   1510     __ CallStub(&stub);
   1511     __ IncrementCounter(
   1512         isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
   1513   } else if (expr->depth() > 1) {
   1514     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
   1515   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
   1516     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   1517   } else {
   1518     FastCloneShallowArrayStub stub(
   1519         FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
   1520     __ CallStub(&stub);
   1521   }
   1522 
   1523   bool result_saved = false;  // Is the result saved to the stack?
   1524 
   1525   // Emit code to evaluate all the non-constant subexpressions and to store
   1526   // them into the newly cloned array.
   1527   for (int i = 0; i < length; i++) {
   1528     Expression* subexpr = subexprs->at(i);
   1529     // If the subexpression is a literal or a simple materialized literal,
   1530     // it is already set in the cloned array.
   1531     if (subexpr->AsLiteral() != NULL ||
   1532         CompileTimeValue::IsCompileTimeValue(subexpr)) {
   1533       continue;
   1534     }
   1535 
   1536     if (!result_saved) {
   1537       __ push(r0);
   1538       result_saved = true;
   1539     }
   1540     VisitForAccumulatorValue(subexpr);
   1541 
   1542     // Store the subexpression value in the array's elements.
   1543     __ ldr(r1, MemOperand(sp));  // Copy of array literal.
   1544     __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
   1545     int offset = FixedArray::kHeaderSize + (i * kPointerSize);
   1546     __ str(result_register(), FieldMemOperand(r1, offset));
   1547 
   1548     // Update the write barrier for the array store, using r2 and the value
   1549     // register r0 as scratch registers.
   1550     __ RecordWrite(r1, Operand(offset), r2, result_register());
   1551 
   1552     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   1553   }
   1554 
   1555   if (result_saved) {
   1556     context()->PlugTOS();
   1557   } else {
   1558     context()->Plug(r0);
   1559   }
   1560 }
   1561 
   1562 
   1563 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
   1564   Comment cmnt(masm_, "[ Assignment");
   1565   // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
   1566   // on the left-hand side.
   1567   if (!expr->target()->IsValidLeftHandSide()) {
   1568     VisitForEffect(expr->target());
   1569     return;
   1570   }
   1571 
   1572   // Left-hand side can only be a property, a global, or a (parameter or local)
   1573   // slot. Variables that rewrite to .arguments are treated as KEYED_PROPERTY.
   1574   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
   1575   LhsKind assign_type = VARIABLE;
   1576   Property* property = expr->target()->AsProperty();
   1577   if (property != NULL) {
   1578     assign_type = (property->key()->IsPropertyName())
   1579         ? NAMED_PROPERTY
   1580         : KEYED_PROPERTY;
   1581   }
   1582 
   1583   // Evaluate LHS expression.
   1584   switch (assign_type) {
   1585     case VARIABLE:
   1586       // Nothing to do here.
   1587       break;
   1588     case NAMED_PROPERTY:
   1589       if (expr->is_compound()) {
   1590         // We need the receiver both on the stack and in the accumulator.
   1591         VisitForAccumulatorValue(property->obj());
   1592         __ push(result_register());
   1593       } else {
   1594         VisitForStackValue(property->obj());
   1595       }
   1596       break;
   1597     case KEYED_PROPERTY:
   1598       if (expr->is_compound()) {
   1599         if (property->is_arguments_access()) {
   1600           VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
   1601           __ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
   1602           __ push(r0);
   1603           __ mov(r0, Operand(property->key()->AsLiteral()->handle()));
   1604         } else {
   1605           VisitForStackValue(property->obj());
   1606           VisitForAccumulatorValue(property->key());
   1607         }
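                // For the keyed load below: receiver in r1 (still on the stack)
                // and key in r0 (also pushed for the later store).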
   1608         __ ldr(r1, MemOperand(sp, 0));
   1609         __ push(r0);
   1610       } else {
   1611         if (property->is_arguments_access()) {
   1612           VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
   1613           __ ldr(r1, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
   1614           __ mov(r0, Operand(property->key()->AsLiteral()->handle()));
   1615           __ Push(r1, r0);
   1616         } else {
   1617           VisitForStackValue(property->obj());
   1618           VisitForStackValue(property->key());
   1619         }
   1620       }
   1621       break;
   1622   }
   1623 
   1624   // For compound assignments we need another deoptimization point after the
   1625   // variable/property load.
   1626   if (expr->is_compound()) {
   1627     { AccumulatorValueContext context(this);
   1628       switch (assign_type) {
   1629         case VARIABLE:
   1630           EmitVariableLoad(expr->target()->AsVariableProxy()->var());
   1631           PrepareForBailout(expr->target(), TOS_REG);
   1632           break;
   1633         case NAMED_PROPERTY:
   1634           EmitNamedPropertyLoad(property);
   1635           PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
   1636           break;
   1637         case KEYED_PROPERTY:
   1638           EmitKeyedPropertyLoad(property);
   1639           PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
   1640           break;
   1641       }
   1642     }
   1643 
   1644     Token::Value op = expr->binary_op();
   1645     __ push(r0);  // Left operand goes on the stack.
   1646     VisitForAccumulatorValue(expr->value());
   1647 
   1648     OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
   1649         ? OVERWRITE_RIGHT
   1650         : NO_OVERWRITE;
   1651     SetSourcePosition(expr->position() + 1);
   1652     AccumulatorValueContext context(this);
   1653     if (ShouldInlineSmiCase(op)) {
   1654       EmitInlineSmiBinaryOp(expr,
   1655                             op,
   1656                             mode,
   1657                             expr->target(),
   1658                             expr->value());
   1659     } else {
   1660       EmitBinaryOp(op, mode);
   1661     }
   1662 
   1663     // Deoptimization point in case the binary operation may have side effects.
   1664     PrepareForBailout(expr->binary_operation(), TOS_REG);
   1665   } else {
   1666     VisitForAccumulatorValue(expr->value());
   1667   }
   1668 
   1669   // Record source position before possible IC call.
   1670   SetSourcePosition(expr->position());
   1671 
   1672   // Store the value.
   1673   switch (assign_type) {
   1674     case VARIABLE:
   1675       EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
   1676                              expr->op());
   1677       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   1678       context()->Plug(r0);
   1679       break;
   1680     case NAMED_PROPERTY:
   1681       EmitNamedPropertyAssignment(expr);
   1682       break;
   1683     case KEYED_PROPERTY:
   1684       EmitKeyedPropertyAssignment(expr);
   1685       break;
   1686   }
   1687 }
   1688 
   1689 
   1690 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   1691   SetSourcePosition(prop->position());
   1692   Literal* key = prop->key()->AsLiteral();
   1693   __ mov(r2, Operand(key->handle()));
   1694   // Call the load IC. It has arguments receiver and property name in r0 and r2.
   1695   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   1696   EmitCallIC(ic, RelocInfo::CODE_TARGET);
   1697 }
   1698 
   1699 
   1700 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   1701   SetSourcePosition(prop->position());
   1702   // Call keyed load IC. It has arguments key and receiver in r0 and r1.
   1703   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
   1704   EmitCallIC(ic, RelocInfo::CODE_TARGET);
   1705 }
   1706 
   1707 
   1708 void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
   1709                                               Token::Value op,
   1710                                               OverwriteMode mode,
   1711                                               Expression* left_expr,
   1712                                               Expression* right_expr) {
   1713   Label done, smi_case, stub_call;
   1714 
   1715   Register scratch1 = r2;
   1716   Register scratch2 = r3;
   1717 
   1718   // Get the arguments.
   1719   Register left = r1;
   1720   Register right = r0;
   1721   __ pop(left);
   1722 
   1723   // Perform combined smi check on both operands.
   1724   __ orr(scratch1, left, Operand(right));
   1725   STATIC_ASSERT(kSmiTag == 0);
   1726   JumpPatchSite patch_site(masm_);
   1727   patch_site.EmitJumpIfSmi(scratch1, &smi_case);
   1728 
   1729   __ bind(&stub_call);
   1730   TypeRecordingBinaryOpStub stub(op, mode);
   1731   EmitCallIC(stub.GetCode(), &patch_site);
   1732   __ jmp(&done);
   1733 
   1734   __ bind(&smi_case);
   1735   // Smi case. This code works the same way as the smi-smi case in the
   1736   // type-recording binary operation stub; see
   1737   // TypeRecordingBinaryOpStub::GenerateSmiSmiOperation for details.
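          // Note that the shift cases below start with an unconditional branch
          // to the stub, so their inline sequences are currently disabled.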
   1738   switch (op) {
   1739     case Token::SAR:
   1740       __ b(&stub_call);
   1741       __ GetLeastBitsFromSmi(scratch1, right, 5);
   1742       __ mov(right, Operand(left, ASR, scratch1));
   1743       __ bic(right, right, Operand(kSmiTagMask));
   1744       break;
   1745     case Token::SHL: {
   1746       __ b(&stub_call);
   1747       __ SmiUntag(scratch1, left);
   1748       __ GetLeastBitsFromSmi(scratch2, right, 5);
   1749       __ mov(scratch1, Operand(scratch1, LSL, scratch2));
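              // Adding 0x40000000 sets the sign bit iff the untagged result
              // lies outside the 31-bit smi range.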
   1750       __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
   1751       __ b(mi, &stub_call);
   1752       __ SmiTag(right, scratch1);
   1753       break;
   1754     }
   1755     case Token::SHR: {
   1756       __ b(&stub_call);
   1757       __ SmiUntag(scratch1, left);
   1758       __ GetLeastBitsFromSmi(scratch2, right, 5);
   1759       __ mov(scratch1, Operand(scratch1, LSR, scratch2));
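              // The result must be a non-negative smi, so either of the top
              // two bits set means it cannot be tagged without losing bits.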
   1760       __ tst(scratch1, Operand(0xc0000000));
   1761       __ b(ne, &stub_call);
   1762       __ SmiTag(right, scratch1);
   1763       break;
   1764     }
   1765     case Token::ADD:
   1766       __ add(scratch1, left, Operand(right), SetCC);
   1767       __ b(vs, &stub_call);
   1768       __ mov(right, scratch1);
   1769       break;
   1770     case Token::SUB:
   1771       __ sub(scratch1, left, Operand(right), SetCC);
   1772       __ b(vs, &stub_call);
   1773       __ mov(right, scratch1);
   1774       break;
   1775     case Token::MUL: {
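              // Only the right operand is untagged, so the product of a smi
              // and an untagged int is again a tagged smi. smull produces the
              // 64-bit product; it overflowed a smi unless the high word
              // equals the sign extension of the low word. A zero result
              // still needs the -0 check below, which bails out to the stub
              // when the sum of the operands is negative.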
   1776       __ SmiUntag(ip, right);
   1777       __ smull(scratch1, scratch2, left, ip);
   1778       __ mov(ip, Operand(scratch1, ASR, 31));
   1779       __ cmp(ip, Operand(scratch2));
   1780       __ b(ne, &stub_call);
   1781       __ tst(scratch1, Operand(scratch1));
   1782       __ mov(right, Operand(scratch1), LeaveCC, ne);
   1783       __ b(ne, &done);
   1784       __ add(scratch2, right, Operand(left), SetCC);
   1785       __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
   1786       __ b(mi, &stub_call);
   1787       break;
   1788     }
   1789     case Token::BIT_OR:
   1790       __ orr(right, left, Operand(right));
   1791       break;
   1792     case Token::BIT_AND:
   1793       __ and_(right, left, Operand(right));
   1794       break;
   1795     case Token::BIT_XOR:
   1796       __ eor(right, left, Operand(right));
   1797       break;
   1798     default:
   1799       UNREACHABLE();
   1800   }
   1801 
   1802   __ bind(&done);
   1803   context()->Plug(r0);
   1804 }
   1805 
   1806 
   1807 void FullCodeGenerator::EmitBinaryOp(Token::Value op,
   1808                                      OverwriteMode mode) {
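          // The left operand was pushed by the caller; the right operand is
          // already in r0, where the stub expects it.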
   1809   __ pop(r1);
   1810   TypeRecordingBinaryOpStub stub(op, mode);
   1811   EmitCallIC(stub.GetCode(), NULL);
   1812   context()->Plug(r0);
   1813 }
   1814 
   1815 
   1816 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
   1817   // Invalid left-hand sides are rewritten to have a 'throw
   1818   // ReferenceError' on the left-hand side.
   1819   if (!expr->IsValidLeftHandSide()) {
   1820     VisitForEffect(expr);
   1821     return;
   1822   }
   1823 
   1824   // Left-hand side can only be a property, a global, or a (parameter or local)
   1825   // slot. Variables that rewrite to .arguments are treated as KEYED_PROPERTY.
   1826   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
   1827   LhsKind assign_type = VARIABLE;
   1828   Property* prop = expr->AsProperty();
   1829   if (prop != NULL) {
   1830     assign_type = (prop->key()->IsPropertyName())
   1831         ? NAMED_PROPERTY
   1832         : KEYED_PROPERTY;
   1833   }
   1834 
   1835   switch (assign_type) {
   1836     case VARIABLE: {
   1837       Variable* var = expr->AsVariableProxy()->var();
   1838       EffectContext context(this);
   1839       EmitVariableAssignment(var, Token::ASSIGN);
   1840       break;
   1841     }
   1842     case NAMED_PROPERTY: {
   1843       __ push(r0);  // Preserve value.
   1844       VisitForAccumulatorValue(prop->obj());
   1845       __ mov(r1, r0);
   1846       __ pop(r0);  // Restore value.
   1847       __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
   1848       Handle<Code> ic = is_strict_mode()
   1849           ? isolate()->builtins()->StoreIC_Initialize_Strict()
   1850           : isolate()->builtins()->StoreIC_Initialize();
   1851       EmitCallIC(ic, RelocInfo::CODE_TARGET);
   1852       break;
   1853     }
   1854     case KEYED_PROPERTY: {
   1855       __ push(r0);  // Preserve value.
   1856       if (prop->is_synthetic()) {
   1857         ASSERT(prop->obj()->AsVariableProxy() != NULL);
   1858         ASSERT(prop->key()->AsLiteral() != NULL);
   1859         { AccumulatorValueContext for_object(this);
   1860           EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
   1861         }
   1862         __ mov(r2, r0);
   1863         __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
   1864       } else {
   1865         VisitForStackValue(prop->obj());
   1866         VisitForAccumulatorValue(prop->key());
   1867         __ mov(r1, r0);
   1868         __ pop(r2);
   1869       }
   1870       __ pop(r0);  // Restore value.
   1871       Handle<Code> ic = is_strict_mode()
   1872           ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
   1873           : isolate()->builtins()->KeyedStoreIC_Initialize();
   1874       EmitCallIC(ic, RelocInfo::CODE_TARGET);
   1875       break;
   1876     }
   1877   }
   1878   PrepareForBailoutForId(bailout_ast_id, TOS_REG);
   1879   context()->Plug(r0);
   1880 }
   1881 
   1882 
   1883 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
   1884                                                Token::Value op) {
   1885   // Left-hand sides that rewrite to explicit property accesses do not reach
   1886   // here.
   1887   ASSERT(var != NULL);
   1888   ASSERT(var->is_global() || var->AsSlot() != NULL);
   1889 
   1890   if (var->is_global()) {
   1891     ASSERT(!var->is_this());
   1892     // Assignment to a global variable.  Use inline caching for the
   1893     // assignment.  Right-hand-side value is passed in r0, variable name in
   1894     // r2, and the global object in r1.
   1895     __ mov(r2, Operand(var->name()));
   1896     __ ldr(r1, GlobalObjectOperand());
   1897     Handle<Code> ic = is_strict_mode()
   1898         ? isolate()->builtins()->StoreIC_Initialize_Strict()
   1899         : isolate()->builtins()->StoreIC_Initialize();
   1900     EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
   1901 
   1902   } else if (op == Token::INIT_CONST) {
   1903     // Like var declarations, const declarations are hoisted to function
   1904     // scope.  However, unlike var initializers, const initializers are able
   1905     // to drill a hole to that function context, even from inside a 'with'
   1906     // context.  We thus bypass the normal static scope lookup.
   1907     Slot* slot = var->AsSlot();
   1908     Label skip;
   1909     switch (slot->type()) {
   1910       case Slot::PARAMETER:
   1911         // No const parameters.
   1912         UNREACHABLE();
   1913         break;
   1914       case Slot::LOCAL:
   1915         // Detect const reinitialization by checking for the hole value.
   1916         __ ldr(r1, MemOperand(fp, SlotOffset(slot)));
   1917         __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
   1918         __ cmp(r1, ip);
   1919         __ b(ne, &skip);
   1920         __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
   1921         break;
   1922       case Slot::CONTEXT: {
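                // Detect const reinitialization by checking for the hole value
                // in the function context slot.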
   1923         __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
   1924         __ ldr(r2, ContextOperand(r1, slot->index()));
   1925         __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
   1926         __ cmp(r2, ip);
   1927         __ b(ne, &skip);
   1928         __ str(r0, ContextOperand(r1, slot->index()));
   1929         int offset = Context::SlotOffset(slot->index());
   1930         __ mov(r3, r0);  // Preserve the stored value in r0.
   1931         __ RecordWrite(r1, Operand(offset), r3, r2);
   1932         break;
   1933       }
   1934       case Slot::LOOKUP:
   1935         __ push(r0);
   1936         __ mov(r0, Operand(slot->var()->name()));
   1937         __ Push(cp, r0);  // Context and name.
   1938         __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
   1939         break;
   1940     }
   1941     __ bind(&skip);
   1942 
   1943   } else if (var->mode() != Variable::CONST) {
   1944     // Perform the assignment for non-const variables.  Const assignments
   1945     // are simply skipped.
   1946     Slot* slot = var->AsSlot();
   1947     switch (slot->type()) {
   1948       case Slot::PARAMETER:
   1949       case Slot::LOCAL:
   1950         // Perform the assignment.
   1951         __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
   1952         break;
   1953 
   1954       case Slot::CONTEXT: {
   1955         MemOperand target = EmitSlotSearch(slot, r1);
   1956         // Perform the assignment and issue the write barrier.
   1957         __ str(result_register(), target);
   1958         // RecordWrite may destroy all its register arguments.
   1959         __ mov(r3, result_register());
   1960         int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
   1961         __ RecordWrite(r1, Operand(offset), r2, r3);
   1962         break;
   1963       }
   1964 
   1965       case Slot::LOOKUP:
   1966         // Call the runtime for the assignment.
   1967         __ push(r0);  // Value.
   1968         __ mov(r1, Operand(slot->var()->name()));
   1969         __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
   1970         __ Push(cp, r1, r0);  // Context, name, strict mode.
   1971         __ CallRuntime(Runtime::kStoreContextSlot, 4);
   1972         break;
   1973     }
   1974   }
   1975 }
   1976 
   1977 
   1978 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
   1979   // Assignment to a property, using a named store IC.
   1980   Property* prop = expr->target()->AsProperty();
   1981   ASSERT(prop != NULL);
   1982   ASSERT(prop->key()->AsLiteral() != NULL);
   1983 
   1984   // If the assignment starts a block of assignments to the same object,
   1985   // change to slow case to avoid the quadratic behavior of repeatedly
   1986   // adding fast properties.
   1987   if (expr->starts_initialization_block()) {
   1988     __ push(result_register());
   1989     __ ldr(ip, MemOperand(sp, kPointerSize));  // Receiver is now under value.
   1990     __ push(ip);
   1991     __ CallRuntime(Runtime::kToSlowProperties, 1);
   1992     __ pop(result_register());
   1993   }
   1994 
   1995   // Record source code position before IC call.
   1996   SetSourcePosition(expr->position());
   1997   __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
   1998   // Load the receiver into r1. Leave a copy on the stack if it is needed
   1999   // later for turning the receiver back into the fast case.
   2000   if (expr->ends_initialization_block()) {
   2001     __ ldr(r1, MemOperand(sp));
   2002   } else {
   2003     __ pop(r1);
   2004   }
   2005 
   2006   Handle<Code> ic = is_strict_mode()
   2007       ? isolate()->builtins()->StoreIC_Initialize_Strict()
   2008       : isolate()->builtins()->StoreIC_Initialize();
   2009   EmitCallIC(ic, RelocInfo::CODE_TARGET);
   2010 
   2011   // If the assignment ends an initialization block, revert to fast case.
   2012   if (expr->ends_initialization_block()) {
   2013     __ push(r0);  // Result of assignment, saved even if not needed.
   2014     // Receiver is under the result value.
   2015     __ ldr(ip, MemOperand(sp, kPointerSize));
   2016     __ push(ip);
   2017     __ CallRuntime(Runtime::kToFastProperties, 1);
   2018     __ pop(r0);
   2019     __ Drop(1);
   2020   }
   2021   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   2022   context()->Plug(r0);
   2023 }
   2024 
   2025 
   2026 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   2027   // Assignment to a property, using a keyed store IC.
   2028 
   2029   // If the assignment starts a block of assignments to the same object,
   2030   // change to slow case to avoid the quadratic behavior of repeatedly
   2031   // adding fast properties.
   2032   if (expr->starts_initialization_block()) {
   2033     __ push(result_register());
   2034     // Receiver is now under the key and value.
   2035     __ ldr(ip, MemOperand(sp, 2 * kPointerSize));
   2036     __ push(ip);
   2037     __ CallRuntime(Runtime::kToSlowProperties, 1);
   2038     __ pop(result_register());
   2039   }
   2040 
   2041   // Record source code position before IC call.
   2042   SetSourcePosition(expr->position());
   2043   __ pop(r1);  // Key.
   2044   // Load the receiver into r2. Leave a copy on the stack if it is needed
   2045   // later for turning the receiver back into the fast case.
   2046   if (expr->ends_initialization_block()) {
   2047     __ ldr(r2, MemOperand(sp));
   2048   } else {
   2049     __ pop(r2);
   2050   }
   2051 
   2052   Handle<Code> ic = is_strict_mode()
   2053       ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
   2054       : isolate()->builtins()->KeyedStoreIC_Initialize();
   2055   EmitCallIC(ic, RelocInfo::CODE_TARGET);
   2056 
   2057   // If the assignment ends an initialization block, revert to fast case.
   2058   if (expr->ends_initialization_block()) {
   2059     __ push(r0);  // Result of assignment, saved even if not needed.
   2060     // Receiver is under the result value.
   2061     __ ldr(ip, MemOperand(sp, kPointerSize));
   2062     __ push(ip);
   2063     __ CallRuntime(Runtime::kToFastProperties, 1);
   2064     __ pop(r0);
   2065     __ Drop(1);
   2066   }
   2067   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   2068   context()->Plug(r0);
   2069 }
   2070 
   2071 
   2072 void FullCodeGenerator::VisitProperty(Property* expr) {
   2073   Comment cmnt(masm_, "[ Property");
   2074   Expression* key = expr->key();
   2075 
   2076   if (key->IsPropertyName()) {
   2077     VisitForAccumulatorValue(expr->obj());
   2078     EmitNamedPropertyLoad(expr);
   2079     context()->Plug(r0);
   2080   } else {
   2081     VisitForStackValue(expr->obj());
   2082     VisitForAccumulatorValue(expr->key());
   2083     __ pop(r1);
   2084     EmitKeyedPropertyLoad(expr);
   2085     context()->Plug(r0);
   2086   }
   2087 }
   2088 

   2089 void FullCodeGenerator::EmitCallWithIC(Call* expr,
   2090                                        Handle<Object> name,
   2091                                        RelocInfo::Mode mode) {
   2092   // Code common for calls using the IC.
   2093   ZoneList<Expression*>* args = expr->arguments();
   2094   int arg_count = args->length();
   2095   { PreservePositionScope scope(masm()->positions_recorder());
   2096     for (int i = 0; i < arg_count; i++) {
   2097       VisitForStackValue(args->at(i));
   2098     }
   2099     __ mov(r2, Operand(name));
   2100   }
   2101   // Record source position for debugger.
   2102   SetSourcePosition(expr->position());
   2103   // Call the IC initialization code.
   2104   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
   2105   Handle<Code> ic =
   2106       isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
   2107   EmitCallIC(ic, mode);
   2108   RecordJSReturnSite(expr);
   2109   // Restore context register.
   2110   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2111   context()->Plug(r0);
   2112 }
   2113 
   2114 
   2115 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
   2116                                             Expression* key,
   2117                                             RelocInfo::Mode mode) {
   2118   // Load the key.
   2119   VisitForAccumulatorValue(key);
   2120 
   2121   // Swap the name of the function and the receiver on the stack to follow
   2122   // the calling convention for call ICs.
   2123   __ pop(r1);
   2124   __ push(r0);
   2125   __ push(r1);
   2126 
   2127   // Code common for calls using the IC.
   2128   ZoneList<Expression*>* args = expr->arguments();
   2129   int arg_count = args->length();
   2130   { PreservePositionScope scope(masm()->positions_recorder());
   2131     for (int i = 0; i < arg_count; i++) {
   2132       VisitForStackValue(args->at(i));
   2133     }
   2134   }
   2135   // Record source position for debugger.
   2136   SetSourcePosition(expr->position());
   2137   // Call the IC initialization code.
   2138   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
   2139   Handle<Code> ic =
   2140       isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
   2141   __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize));  // Key.
   2142   EmitCallIC(ic, mode);
   2143   RecordJSReturnSite(expr);
   2144   // Restore context register.
   2145   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2146   context()->DropAndPlug(1, r0);  // Drop the key still on the stack.
   2147 }
   2148 
   2149 
   2150 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
   2151   // Code common for calls using the call stub.
   2152   ZoneList<Expression*>* args = expr->arguments();
   2153   int arg_count = args->length();
   2154   { PreservePositionScope scope(masm()->positions_recorder());
   2155     for (int i = 0; i < arg_count; i++) {
   2156       VisitForStackValue(args->at(i));
   2157     }
   2158   }
   2159   // Record source position for debugger.
   2160   SetSourcePosition(expr->position());
   2161   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
   2162   CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
   2163   __ CallStub(&stub);
   2164   RecordJSReturnSite(expr);
   2165   // Restore context register.
   2166   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2167   context()->DropAndPlug(1, r0);
   2168 }
   2169 
   2170 
   2171 void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
   2172                                                       int arg_count) {
   2173   // Push a copy of the first argument, or undefined if it doesn't exist.
   2174   if (arg_count > 0) {
   2175     __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
   2176   } else {
   2177     __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
   2178   }
   2179   __ push(r1);
   2180 
   2181   // Push the receiver of the enclosing function and do runtime call.
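          // The receiver lies above the saved fp and return address plus the
          // parameter slots, hence the offset of (2 + num_parameters) slots.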
   2182   __ ldr(r1, MemOperand(fp, (2 + scope()->num_parameters()) * kPointerSize));
   2183   __ push(r1);
   2184   // Push the strict mode flag.
   2185   __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
   2186   __ push(r1);
   2187 
   2188   __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
   2189                  ? Runtime::kResolvePossiblyDirectEvalNoLookup
   2190                  : Runtime::kResolvePossiblyDirectEval, 4);
   2191 }
   2192 
   2193 
   2194 void FullCodeGenerator::VisitCall(Call* expr) {
   2195 #ifdef DEBUG
   2196   // We want to verify that RecordJSReturnSite gets called on all paths
   2197   // through this function.  Avoid early returns.
   2198   expr->return_is_recorded_ = false;
   2199 #endif
   2200 
   2201   Comment cmnt(masm_, "[ Call");
   2202   Expression* fun = expr->expression();
   2203   Variable* var = fun->AsVariableProxy()->AsVariable();
   2204 
   2205   if (var != NULL && var->is_possibly_eval()) {
   2206     // In a call to eval, we first call %ResolvePossiblyDirectEval to
   2207     // resolve the function we need to call and the receiver of the
   2208     // call.  Then we call the resolved function using the given
   2209     // arguments.
   2210     ZoneList<Expression*>* args = expr->arguments();
   2211     int arg_count = args->length();
   2212 
   2213     { PreservePositionScope pos_scope(masm()->positions_recorder());
   2214       VisitForStackValue(fun);
   2215       __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
   2216       __ push(r2);  // Reserved receiver slot.
   2217 
   2218       // Push the arguments.
   2219       for (int i = 0; i < arg_count; i++) {
   2220         VisitForStackValue(args->at(i));
   2221       }
   2222 
   2223       // If we know that eval can only be shadowed by eval-introduced
   2224       // variables we attempt to load the global eval function directly
   2225       // in generated code. If we succeed, there is no need to perform a
   2226       // context lookup in the runtime system.
   2227       Label done;
   2228       if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
   2229         Label slow;
   2230         EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
   2231                                           NOT_INSIDE_TYPEOF,
   2232                                           &slow);
   2233         // Push the function and resolve eval.
   2234         __ push(r0);
   2235         EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
   2236         __ jmp(&done);
   2237         __ bind(&slow);
   2238       }
   2239 
   2240       // Push copy of the function (found below the arguments) and
   2241       // resolve eval.
   2242       __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
   2243       __ push(r1);
   2244       EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
   2245       if (done.is_linked()) {
   2246         __ bind(&done);
   2247       }
   2248 
   2249       // The runtime call returns a pair of values in r0 (function) and
   2250       // r1 (receiver). Touch up the stack with the right values.
   2251       __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
   2252       __ str(r1, MemOperand(sp, arg_count * kPointerSize));
   2253     }
   2254 
   2255     // Record source position for debugger.
   2256     SetSourcePosition(expr->position());
   2257     InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
   2258     CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
   2259     __ CallStub(&stub);
   2260     RecordJSReturnSite(expr);
   2261     // Restore context register.
   2262     __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   2263     context()->DropAndPlug(1, r0);
   2264   } else if (var != NULL && !var->is_this() && var->is_global()) {
   2265     // Push global object as receiver for the call IC.
   2266     __ ldr(r0, GlobalObjectOperand());
   2267     __ push(r0);
   2268     EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
   2269   } else if (var != NULL && var->AsSlot() != NULL &&
   2270              var->AsSlot()->type() == Slot::LOOKUP) {
   2271     // Call to a lookup slot (dynamically introduced variable).
   2272     Label slow, done;
   2273 
   2274     { PreservePositionScope scope(masm()->positions_recorder());
   2275       // Generate code for loading from variables potentially shadowed
   2276       // by eval-introduced variables.
   2277       EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
   2278                                       NOT_INSIDE_TYPEOF,
   2279                                       &slow,
   2280                                       &done);
   2281     }
   2282 
   2283     __ bind(&slow);
   2284     // Call the runtime to find the function to call (returned in r0)
   2285   // and the object holding it (returned in r1).
   2286     __ push(context_register());
   2287     __ mov(r2, Operand(var->name()));
   2288     __ push(r2);
   2289     __ CallRuntime(Runtime::kLoadContextSlot, 2);
   2290     __ Push(r0, r1);  // Function, receiver.
   2291 
   2292     // If fast case code has been generated, emit code to push the
   2293     // function and receiver and have the slow path jump around this
   2294     // code.
   2295     if (done.is_linked()) {
   2296       Label call;
   2297       __ b(&call);
   2298       __ bind(&done);
   2299       // Push function.
   2300       __ push(r0);
   2301       // Push global receiver.
   2302       __ ldr(r1, GlobalObjectOperand());
   2303       __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
   2304       __ push(r1);
   2305       __ bind(&call);
   2306     }
   2307 
   2308     EmitCallWithStub(expr);
   2309   } else if (fun->AsProperty() != NULL) {
   2310     // Call to an object property.
   2311     Property* prop = fun->AsProperty();
   2312     Literal* key = prop->key()->AsLiteral();
   2313     if (key != NULL && key->handle()->IsSymbol()) {
   2314       // Call to a named property, use call IC.
   2315       { PreservePositionScope scope(masm()->positions_recorder());
   2316         VisitForStackValue(prop->obj());
   2317       }
   2318       EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
   2319     } else {
   2320       // Call to a keyed property.
   2321       // For a synthetic property, use the keyed load IC followed by a
   2322       // function call; for a regular property, use the keyed CallIC.
   2323       if (prop->is_synthetic()) {
   2324         // Do not visit the object and key subexpressions (they are shared
   2325         // by all occurrences of the same rewritten parameter).
   2326         ASSERT(prop->obj()->AsVariableProxy() != NULL);
   2327         ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
   2328         Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
   2329         MemOperand operand = EmitSlotSearch(slot, r1);
   2330         __ ldr(r1, operand);
   2331 
   2332         ASSERT(prop->key()->AsLiteral() != NULL);
   2333         ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
   2334         __ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
   2335 
   2336         // Record source code position for IC call.
   2337         SetSourcePosition(prop->position());
   2338 
   2339         Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
   2340         EmitCallIC(ic, RelocInfo::CODE_TARGET);
   2341         __ ldr(r1, GlobalObjectOperand());
   2342         __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
   2343         __ Push(r0, r1);  // Function, receiver.
   2344         EmitCallWithStub(expr);
   2345       } else {
   2346         { PreservePositionScope scope(masm()->positions_recorder());
   2347           VisitForStackValue(prop->obj());
   2348         }
   2349         EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
   2350       }
   2351     }
   2352   } else {
   2353     { PreservePositionScope scope(masm()->positions_recorder());
   2354       VisitForStackValue(fun);
   2355     }
   2356     // Load global receiver object.
   2357     __ ldr(r1, GlobalObjectOperand());
   2358     __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
   2359     __ push(r1);
   2360     // Emit function call.
   2361     EmitCallWithStub(expr);
   2362   }
   2363 
   2364 #ifdef DEBUG
   2365   // RecordJSReturnSite should have been called.
   2366   ASSERT(expr->return_is_recorded_);
   2367 #endif
   2368 }
   2369 
   2370 
   2371 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   2372   Comment cmnt(masm_, "[ CallNew");
   2373   // According to ECMA-262, section 11.2.2, page 44, the function
   2374   // expression in new calls must be evaluated before the
   2375   // arguments.
   2376 
   2377   // Push the constructor on the stack.  If it's not a function, it's used
   2378   // as the receiver for CALL_NON_FUNCTION; otherwise the value on the stack
   2379   // is ignored.
   2380   VisitForStackValue(expr->expression());
   2381 
   2382   // Push the arguments ("left-to-right") on the stack.
   2383   ZoneList<Expression*>* args = expr->arguments();
   2384   int arg_count = args->length();
   2385   for (int i = 0; i < arg_count; i++) {
   2386     VisitForStackValue(args->at(i));
   2387   }
   2388 
   2389   // Call the construct call builtin that handles allocation and
   2390   // constructor invocation.
   2391   SetSourcePosition(expr->position());
   2392 
   2393   // Load function and argument count into r1 and r0.
   2394   __ mov(r0, Operand(arg_count));
   2395   __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
   2396 
   2397   Handle<Code> construct_builtin =
   2398       isolate()->builtins()->JSConstructCall();
   2399   __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
   2400   context()->Plug(r0);
   2401 }
   2402 
   2403 
   2404 void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
   2405   ASSERT(args->length() == 1);
   2406 
   2407   VisitForAccumulatorValue(args->at(0));
   2408 
   2409   Label materialize_true, materialize_false;
   2410   Label* if_true = NULL;
   2411   Label* if_false = NULL;
   2412   Label* fall_through = NULL;
   2413   context()->PrepareTest(&materialize_true, &materialize_false,
   2414                          &if_true, &if_false, &fall_through);
   2415 
   2416   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   2417   __ tst(r0, Operand(kSmiTagMask));
   2418   Split(eq, if_true, if_false, fall_through);
   2419 
   2420   context()->Plug(if_true, if_false);
   2421 }
   2422 
   2423 
   2424 void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
   2425   ASSERT(args->length() == 1);
   2426 
   2427   VisitForAccumulatorValue(args->at(0));
   2428 
   2429   Label materialize_true, materialize_false;
   2430   Label* if_true = NULL;
   2431   Label* if_false = NULL;
   2432   Label* fall_through = NULL;
   2433   context()->PrepareTest(&materialize_true, &materialize_false,
   2434                          &if_true, &if_false, &fall_through);
   2435 
   2436   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
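          // A non-negative smi has both the tag bit and the sign bit clear,
          // so one tst covers both checks.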
   2437   __ tst(r0, Operand(kSmiTagMask | 0x80000000));
   2438   Split(eq, if_true, if_false, fall_through);
   2439 
   2440   context()->Plug(if_true, if_false);
   2441 }
   2442 
   2443 
   2444 void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
   2445   ASSERT(args->length() == 1);
   2446 
   2447   VisitForAccumulatorValue(args->at(0));
   2448 
   2449   Label materialize_true, materialize_false;
   2450   Label* if_true = NULL;
   2451   Label* if_false = NULL;
   2452   Label* fall_through = NULL;
   2453   context()->PrepareTest(&materialize_true, &materialize_false,
   2454                          &if_true, &if_false, &fall_through);
   2455 
   2456   __ JumpIfSmi(r0, if_false);
   2457   __ LoadRoot(ip, Heap::kNullValueRootIndex);
   2458   __ cmp(r0, ip);
   2459   __ b(eq, if_true);
   2460   __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
   2461   // Undetectable objects behave like undefined when tested with typeof.
   2462   __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
   2463   __ tst(r1, Operand(1 << Map::kIsUndetectable));
   2464   __ b(ne, if_false);
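          // JS objects have instance types in the range
          // [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE].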
   2465   __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
   2466   __ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE));
   2467   __ b(lt, if_false);
   2468   __ cmp(r1, Operand(LAST_JS_OBJECT_TYPE));
   2469   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   2470   Split(le, if_true, if_false, fall_through);
   2471 
   2472   context()->Plug(if_true, if_false);
   2473 }
   2474 
   2475 
   2476 void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
   2477   ASSERT(args->length() == 1);
   2478 
   2479   VisitForAccumulatorValue(args->at(0));
   2480 
   2481   Label materialize_true, materialize_false;
   2482   Label* if_true = NULL;
   2483   Label* if_false = NULL;
   2484   Label* fall_through = NULL;
   2485   context()->PrepareTest(&materialize_true, &materialize_false,
   2486                          &if_true, &if_false, &fall_through);
   2487 
   2488   __ JumpIfSmi(r0, if_false);
   2489   __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
   2490   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   2491   Split(ge, if_true, if_false, fall_through);
   2492 
   2493   context()->Plug(if_true, if_false);
   2494 }
   2495 
   2496 
   2497 void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
   2498   ASSERT(args->length() == 1);
   2499 
   2500   VisitForAccumulatorValue(args->at(0));
   2501 
   2502   Label materialize_true, materialize_false;
   2503   Label* if_true = NULL;
   2504   Label* if_false = NULL;
   2505   Label* fall_through = NULL;
   2506   context()->PrepareTest(&materialize_true, &materialize_false,
   2507                          &if_true, &if_false, &fall_through);
   2508 
   2509   __ JumpIfSmi(r0, if_false);
   2510   __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
   2511   __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
   2512   __ tst(r1, Operand(1 << Map::kIsUndetectable));
   2513   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   2514   Split(ne, if_true, if_false, fall_through);
   2515 
   2516   context()->Plug(if_true, if_false);
   2517 }
   2518 
   2519 
   2520 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
   2521     ZoneList<Expression*>* args) {
   2522 
   2523   ASSERT(args->length() == 1);
   2524 
   2525   VisitForAccumulatorValue(args->at(0));
   2526 
   2527   Label materialize_true, materialize_false;
   2528   Label* if_true = NULL;
   2529   Label* if_false = NULL;
   2530   Label* fall_through = NULL;
   2531   context()->PrepareTest(&materialize_true, &materialize_false,
   2532                          &if_true, &if_false, &fall_through);
   2533 
   2534   if (FLAG_debug_code) __ AbortIfSmi(r0);
   2535 
   2536   __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
   2537   __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
   2538   __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
   2539   __ b(ne, if_true);
   2540 
   2541   // Check for a fast-case object. Generate a false result for slow-case objects.
   2542   __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
   2543   __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
   2544   __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
   2545   __ cmp(r2, ip);
   2546   __ b(eq, if_false);
   2547 
   2548   // Look for the valueOf symbol in the descriptor array and indicate false
   2549   // if it is found. The descriptor type is not checked, so a transition keyed
   2550   // by valueOf also yields false (a false negative).
   2551   __ ldr(r4, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset));
   2552   __ ldr(r3, FieldMemOperand(r4, FixedArray::kLengthOffset));
   2553   // r4: descriptor array
   2554   // r3: length of descriptor array
   2555   // Calculate the end of the descriptor array.
   2556   STATIC_ASSERT(kSmiTag == 0);
   2557   STATIC_ASSERT(kSmiTagSize == 1);
   2558   STATIC_ASSERT(kPointerSize == 4);
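          // r3 holds the length as a smi; shifting it left by
          // kPointerSizeLog2 - kSmiTagSize converts it to a byte offset.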
   2559   __ add(r2, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   2560   __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
   2561 
   2562   // Calculate location of the first key name.
   2563   __ add(r4,
   2564          r4,
   2565          Operand(FixedArray::kHeaderSize - kHeapObjectTag +
   2566                  DescriptorArray::kFirstIndex * kPointerSize));
   2567   // Loop through all the keys in the descriptor array. If one of these is the
   2568   // symbol valueOf the result is false.
   2569   Label entry, loop;
   2570   // The use of ip to store the valueOf symbol assumes that it is not otherwise
   2571   // used in the loop below.
   2572   __ mov(ip, Operand(FACTORY->value_of_symbol()));
   2573   __ jmp(&entry);
   2574   __ bind(&loop);
   2575   __ ldr(r3, MemOperand(r4, 0));
   2576   __ cmp(r3, ip);
   2577   __ b(eq, if_false);
   2578   __ add(r4, r4, Operand(kPointerSize));
   2579   __ bind(&entry);
   2580   __ cmp(r4, Operand(r2));
   2581   __ b(ne, &loop);
   2582 
   2583   // If a valueOf property is not found on the object, check that its
   2584   // prototype is the unmodified String prototype. If not, the result is false.
   2585   __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
   2586   __ tst(r2, Operand(kSmiTagMask));
   2587   __ b(eq, if_false);
   2588   __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
   2589   __ ldr(r3, ContextOperand(cp, Context::GLOBAL_INDEX));
   2590   __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalContextOffset));
   2591   __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
   2592   __ cmp(r2, r3);
   2593   __ b(ne, if_false);
   2594 
   2595   // Set the bit in the map to indicate that it has been checked safe for the
   2596   // default valueOf, and set a true result.
   2597   __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
   2598   __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
   2599   __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
   2600   __ jmp(if_true);
   2601 
   2602   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   2603   context()->Plug(if_true, if_false);
   2604 }
   2605 
   2606 
   2607 void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
   2608   ASSERT(args->length() == 1);
   2609 
   2610   VisitForAccumulatorValue(args->at(0));
   2611 
   2612   Label materialize_true, materialize_false;
   2613   Label* if_true = NULL;
   2614   Label* if_false = NULL;
   2615   Label* fall_through = NULL;
   2616   context()->PrepareTest(&materialize_true, &materialize_false,
   2617                          &if_true, &if_false, &fall_through);
   2618 
   2619   __ JumpIfSmi(r0, if_false);
   2620   __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
   2621   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   2622   Split(eq, if_true, if_false, fall_through);
   2623 
   2624   context()->Plug(if_true, if_false);
   2625 }
   2626 
   2627 
   2628 void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
   2629   ASSERT(args->length() == 1);
   2630 
   2631   VisitForAccumulatorValue(args->at(0));
   2632 
   2633   Label materialize_true, materialize_false;
   2634   Label* if_true = NULL;
   2635   Label* if_false = NULL;
   2636   Label* fall_through = NULL;
   2637   context()->PrepareTest(&materialize_true, &materialize_false,
   2638                          &if_true, &if_false, &fall_through);
   2639 
   2640   __ JumpIfSmi(r0, if_false);
   2641   __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
   2642   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   2643   Split(eq, if_true, if_false, fall_through);
   2644 
   2645   context()->Plug(if_true, if_false);
   2646 }
   2647 
   2648 
   2649 void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
   2650   ASSERT(args->length() == 1);
   2651 
   2652   VisitForAccumulatorValue(args->at(0));
   2653 
   2654   Label materialize_true, materialize_false;
   2655   Label* if_true = NULL;
   2656   Label* if_false = NULL;
   2657   Label* fall_through = NULL;
   2658   context()->PrepareTest(&materialize_true, &materialize_false,
   2659                          &if_true, &if_false, &fall_through);
   2660 
   2661   __ JumpIfSmi(r0, if_false);
   2662   __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
   2663   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   2664   Split(eq, if_true, if_false, fall_through);
   2665 
   2666   context()->Plug(if_true, if_false);
   2667 }
   2668 
   2669 
   2671 void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
   2672   ASSERT(args->length() == 0);
   2673 
   2674   Label materialize_true, materialize_false;
   2675   Label* if_true = NULL;
   2676   Label* if_false = NULL;
   2677   Label* fall_through = NULL;
   2678   context()->PrepareTest(&materialize_true, &materialize_false,
   2679                          &if_true, &if_false, &fall_through);
   2680 
   2681   // Get the frame pointer for the calling frame.
   2682   __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2683 
   2684   // Skip the arguments adaptor frame if it exists.
   2685   Label check_frame_marker;
   2686   __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
   2687   __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   2688   __ b(ne, &check_frame_marker);
   2689   __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
   2690 
   2691   // Check the marker in the calling frame.
   2692   __ bind(&check_frame_marker);
   2693   __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
   2694   __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
   2695   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   2696   Split(eq, if_true, if_false, fall_through);
   2697 
   2698   context()->Plug(if_true, if_false);
   2699 }
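// The frame walk above, restated as C-like pseudocode (editor's sketch; the
// Memory accessors are illustrative shorthand for the loads emitted above):
//
//   Address caller_fp = Memory::Address_at(fp + kCallerFPOffset);
//   if (Memory::Object_at(caller_fp + kContextOffset) ==
//       Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)) {
//     caller_fp = Memory::Address_at(caller_fp + kCallerFPOffset);
//   }
//   bool is_construct =
//       Memory::Object_at(caller_fp + kMarkerOffset) ==
//       Smi::FromInt(StackFrame::CONSTRUCT);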
   2700 
   2701 
   2702 void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
   2703   ASSERT(args->length() == 2);
   2704 
   2705   // Load the two objects into registers and perform the comparison.
   2706   VisitForStackValue(args->at(0));
   2707   VisitForAccumulatorValue(args->at(1));
   2708 
   2709   Label materialize_true, materialize_false;
   2710   Label* if_true = NULL;
   2711   Label* if_false = NULL;
   2712   Label* fall_through = NULL;
   2713   context()->PrepareTest(&materialize_true, &materialize_false,
   2714                          &if_true, &if_false, &fall_through);
   2715 
   2716   __ pop(r1);
   2717   __ cmp(r0, r1);
   2718   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   2719   Split(eq, if_true, if_false, fall_through);
   2720 
   2721   context()->Plug(if_true, if_false);
   2722 }
   2723 
   2724 
   2725 void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
   2726   ASSERT(args->length() == 1);
   2727 
    2728   // ArgumentsAccessStub expects the key in r1 and the formal
    2729   // parameter count in r0.
   2730   VisitForAccumulatorValue(args->at(0));
   2731   __ mov(r1, r0);
   2732   __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
   2733   ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
   2734   __ CallStub(&stub);
   2735   context()->Plug(r0);
   2736 }
   2737 
   2738 
   2739 void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
   2740   ASSERT(args->length() == 0);
   2741 
   2742   Label exit;
   2743   // Get the number of formal parameters.
   2744   __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
   2745 
   2746   // Check if the calling frame is an arguments adaptor frame.
   2747   __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2748   __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
   2749   __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   2750   __ b(ne, &exit);
   2751 
   2752   // Arguments adaptor case: Read the arguments length from the
   2753   // adaptor frame.
   2754   __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
   2755 
   2756   __ bind(&exit);
   2757   context()->Plug(r0);
   2758 }
   2759 
   2760 
   2761 void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
   2762   ASSERT(args->length() == 1);
   2763   Label done, null, function, non_function_constructor;
   2764 
   2765   VisitForAccumulatorValue(args->at(0));
   2766 
   2767   // If the object is a smi, we return null.
   2768   __ JumpIfSmi(r0, &null);
   2769 
   2770   // Check that the object is a JS object but take special care of JS
   2771   // functions to make sure they have 'Function' as their class.
   2772   __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE);  // Map is now in r0.
   2773   __ b(lt, &null);
   2774 
   2775   // As long as JS_FUNCTION_TYPE is the last instance type and it is
   2776   // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
   2777   // LAST_JS_OBJECT_TYPE.
   2778   ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
   2779   ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
   2780   __ cmp(r1, Operand(JS_FUNCTION_TYPE));
   2781   __ b(eq, &function);
   2782 
   2783   // Check if the constructor in the map is a function.
   2784   __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
   2785   __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
   2786   __ b(ne, &non_function_constructor);
   2787 
   2788   // r0 now contains the constructor function. Grab the
   2789   // instance class name from there.
   2790   __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
   2791   __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
   2792   __ b(&done);
   2793 
   2794   // Functions have class 'Function'.
   2795   __ bind(&function);
   2796   __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex);
   2797   __ jmp(&done);
   2798 
   2799   // Objects with a non-function constructor have class 'Object'.
   2800   __ bind(&non_function_constructor);
    2801   __ LoadRoot(r0, Heap::kObject_symbolRootIndex);
   2802   __ jmp(&done);
   2803 
   2804   // Non-JS objects have class null.
   2805   __ bind(&null);
   2806   __ LoadRoot(r0, Heap::kNullValueRootIndex);
   2807 
   2808   // All done.
   2809   __ bind(&done);
   2810 
   2811   context()->Plug(r0);
   2812 }
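// Observable results of the classification above (editor's illustration,
// assuming default built-ins; %_ClassOf is the natives-syntax spelling):
//
//   %_ClassOf([1, 2])           // "Array"  (instance class of constructor)
//   %_ClassOf(function f() {})  // "Function"
//   %_ClassOf(123)              // null     (smi, not a JS object)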
   2813 
   2814 
   2815 void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
   2816   // Conditionally generate a log call.
   2817   // Args:
   2818   //   0 (literal string): The type of logging (corresponds to the flags).
   2819   //     This is used to determine whether or not to generate the log call.
   2820   //   1 (string): Format string.  Access the string at argument index 2
   2821   //     with '%2s' (see Logger::LogRuntime for all the formats).
   2822   //   2 (array): Arguments to the format string.
   2823   ASSERT_EQ(args->length(), 3);
   2824 #ifdef ENABLE_LOGGING_AND_PROFILING
   2825   if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
   2826     VisitForStackValue(args->at(1));
   2827     VisitForStackValue(args->at(2));
   2828     __ CallRuntime(Runtime::kLog, 2);
   2829   }
   2830 #endif
   2831   // Finally, we're expected to leave a value on the top of the stack.
   2832   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   2833   context()->Plug(r0);
   2834 }
   2835 
   2836 
   2837 void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
   2838   ASSERT(args->length() == 0);
   2839 
   2840   Label slow_allocate_heapnumber;
   2841   Label heapnumber_allocated;
   2842 
   2843   __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
   2844   __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
   2845   __ jmp(&heapnumber_allocated);
   2846 
   2847   __ bind(&slow_allocate_heapnumber);
   2848   // Allocate a heap number.
   2849   __ CallRuntime(Runtime::kNumberAlloc, 0);
   2850   __ mov(r4, Operand(r0));
   2851 
   2852   __ bind(&heapnumber_allocated);
   2853 
   2854   // Convert 32 random bits in r0 to 0.(32 random bits) in a double
   2855   // by computing:
    2856   // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
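  // Concretely (editor's sketch in portable C++; constants match the
  // assembly below):
  //
  //   uint64_t bits = (uint64_t(0x41300000) << 32) | random32;
  //   double d;
  //   memcpy(&d, &bits, sizeof(d));  // d == 2^20 + random32 * 2^-32
  //   double r = d - 1048576.0;      // r == random32 * 2^-32, in [0, 1)
  //
  // For example, random32 == 0x80000000 yields r == 0.5.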
   2857   if (CpuFeatures::IsSupported(VFP3)) {
   2858     __ PrepareCallCFunction(1, r0);
   2859     __ mov(r0, Operand(ExternalReference::isolate_address()));
   2860     __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
   2861 
   2862     CpuFeatures::Scope scope(VFP3);
   2863     // 0x41300000 is the top half of 1.0 x 2^20 as a double.
   2864     // Create this constant using mov/orr to avoid PC relative load.
   2865     __ mov(r1, Operand(0x41000000));
   2866     __ orr(r1, r1, Operand(0x300000));
   2867     // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
   2868     __ vmov(d7, r0, r1);
   2869     // Move 0x4130000000000000 to VFP.
   2870     __ mov(r0, Operand(0, RelocInfo::NONE));
   2871     __ vmov(d8, r0, r1);
   2872     // Subtract and store the result in the heap number.
   2873     __ vsub(d7, d7, d8);
   2874     __ sub(r0, r4, Operand(kHeapObjectTag));
   2875     __ vstr(d7, r0, HeapNumber::kValueOffset);
   2876     __ mov(r0, r4);
   2877   } else {
   2878     __ PrepareCallCFunction(2, r0);
   2879     __ mov(r0, Operand(r4));
   2880     __ mov(r1, Operand(ExternalReference::isolate_address()));
   2881     __ CallCFunction(
   2882         ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
   2883   }
   2884 
   2885   context()->Plug(r0);
   2886 }
   2887 
   2888 
   2889 void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
   2890   // Load the arguments on the stack and call the stub.
   2891   SubStringStub stub;
   2892   ASSERT(args->length() == 3);
   2893   VisitForStackValue(args->at(0));
   2894   VisitForStackValue(args->at(1));
   2895   VisitForStackValue(args->at(2));
   2896   __ CallStub(&stub);
   2897   context()->Plug(r0);
   2898 }
   2899 
   2900 
   2901 void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) {
   2902   // Load the arguments on the stack and call the stub.
   2903   RegExpExecStub stub;
   2904   ASSERT(args->length() == 4);
   2905   VisitForStackValue(args->at(0));
   2906   VisitForStackValue(args->at(1));
   2907   VisitForStackValue(args->at(2));
   2908   VisitForStackValue(args->at(3));
   2909   __ CallStub(&stub);
   2910   context()->Plug(r0);
   2911 }
   2912 
   2913 
   2914 void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
   2915   ASSERT(args->length() == 1);
   2916 
   2917   VisitForAccumulatorValue(args->at(0));  // Load the object.
   2918 
   2919   Label done;
    2920   // If the object is a smi, return the object.
   2921   __ JumpIfSmi(r0, &done);
   2922   // If the object is not a value type, return the object.
   2923   __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
   2924   __ b(ne, &done);
   2925   __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
   2926 
   2927   __ bind(&done);
   2928   context()->Plug(r0);
   2929 }
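// Semantics of the fast path above (editor's note): only JSValue wrappers are
// unwrapped; everything else is returned unchanged. For example:
//
//   %_ValueOf(new String("abc"))  // "abc"
//   %_ValueOf(42)                 // 42 (smi path)
//   %_ValueOf({})                 // the object itself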
   2930 
   2931 
   2932 void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
   2933   // Load the arguments on the stack and call the runtime function.
   2934   ASSERT(args->length() == 2);
   2935   VisitForStackValue(args->at(0));
   2936   VisitForStackValue(args->at(1));
   2937   MathPowStub stub;
   2938   __ CallStub(&stub);
   2939   context()->Plug(r0);
   2940 }
   2941 
   2942 
   2943 void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
   2944   ASSERT(args->length() == 2);
   2945 
   2946   VisitForStackValue(args->at(0));  // Load the object.
   2947   VisitForAccumulatorValue(args->at(1));  // Load the value.
   2948   __ pop(r1);  // r0 = value. r1 = object.
   2949 
   2950   Label done;
   2951   // If the object is a smi, return the value.
   2952   __ JumpIfSmi(r1, &done);
   2953 
   2954   // If the object is not a value type, return the value.
   2955   __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
   2956   __ b(ne, &done);
   2957 
   2958   // Store the value.
   2959   __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
   2960   // Update the write barrier.  Save the value as it will be
   2961   // overwritten by the write barrier code and is needed afterward.
   2962   __ RecordWrite(r1, Operand(JSValue::kValueOffset - kHeapObjectTag), r2, r3);
   2963 
   2964   __ bind(&done);
   2965   context()->Plug(r0);
   2966 }
   2967 
   2968 
   2969 void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
   2970   ASSERT_EQ(args->length(), 1);
   2971 
   2972   // Load the argument on the stack and call the stub.
   2973   VisitForStackValue(args->at(0));
   2974 
   2975   NumberToStringStub stub;
   2976   __ CallStub(&stub);
   2977   context()->Plug(r0);
   2978 }
   2979 
   2980 
   2981 void FullCodeGenerator::EmitStringCharFromCode(ZoneList<Expression*>* args) {
   2982   ASSERT(args->length() == 1);
   2983 
   2984   VisitForAccumulatorValue(args->at(0));
   2985 
   2986   Label done;
   2987   StringCharFromCodeGenerator generator(r0, r1);
   2988   generator.GenerateFast(masm_);
   2989   __ jmp(&done);
   2990 
   2991   NopRuntimeCallHelper call_helper;
   2992   generator.GenerateSlow(masm_, call_helper);
   2993 
   2994   __ bind(&done);
   2995   context()->Plug(r1);
   2996 }
   2997 
   2998 
   2999 void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) {
   3000   ASSERT(args->length() == 2);
   3001 
   3002   VisitForStackValue(args->at(0));
   3003   VisitForAccumulatorValue(args->at(1));
   3004 
   3005   Register object = r1;
   3006   Register index = r0;
   3007   Register scratch = r2;
   3008   Register result = r3;
   3009 
   3010   __ pop(object);
   3011 
   3012   Label need_conversion;
   3013   Label index_out_of_range;
   3014   Label done;
   3015   StringCharCodeAtGenerator generator(object,
   3016                                       index,
   3017                                       scratch,
   3018                                       result,
   3019                                       &need_conversion,
   3020                                       &need_conversion,
   3021                                       &index_out_of_range,
   3022                                       STRING_INDEX_IS_NUMBER);
   3023   generator.GenerateFast(masm_);
   3024   __ jmp(&done);
   3025 
   3026   __ bind(&index_out_of_range);
   3027   // When the index is out of range, the spec requires us to return
   3028   // NaN.
   3029   __ LoadRoot(result, Heap::kNanValueRootIndex);
   3030   __ jmp(&done);
   3031 
   3032   __ bind(&need_conversion);
   3033   // Load the undefined value into the result register, which will
   3034   // trigger conversion.
   3035   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
   3036   __ jmp(&done);
   3037 
   3038   NopRuntimeCallHelper call_helper;
   3039   generator.GenerateSlow(masm_, call_helper);
   3040 
   3041   __ bind(&done);
   3042   context()->Plug(result);
   3043 }
   3044 
   3045 
   3046 void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) {
   3047   ASSERT(args->length() == 2);
   3048 
   3049   VisitForStackValue(args->at(0));
   3050   VisitForAccumulatorValue(args->at(1));
   3051 
   3052   Register object = r1;
   3053   Register index = r0;
   3054   Register scratch1 = r2;
   3055   Register scratch2 = r3;
   3056   Register result = r0;
   3057 
   3058   __ pop(object);
   3059 
   3060   Label need_conversion;
   3061   Label index_out_of_range;
   3062   Label done;
   3063   StringCharAtGenerator generator(object,
   3064                                   index,
   3065                                   scratch1,
   3066                                   scratch2,
   3067                                   result,
   3068                                   &need_conversion,
   3069                                   &need_conversion,
   3070                                   &index_out_of_range,
   3071                                   STRING_INDEX_IS_NUMBER);
   3072   generator.GenerateFast(masm_);
   3073   __ jmp(&done);
   3074 
   3075   __ bind(&index_out_of_range);
   3076   // When the index is out of range, the spec requires us to return
   3077   // the empty string.
   3078   __ LoadRoot(result, Heap::kEmptyStringRootIndex);
   3079   __ jmp(&done);
   3080 
   3081   __ bind(&need_conversion);
   3082   // Move smi zero into the result register, which will trigger
   3083   // conversion.
   3084   __ mov(result, Operand(Smi::FromInt(0)));
   3085   __ jmp(&done);
   3086 
   3087   NopRuntimeCallHelper call_helper;
   3088   generator.GenerateSlow(masm_, call_helper);
   3089 
   3090   __ bind(&done);
   3091   context()->Plug(result);
   3092 }
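// Behavior of the two generators above (editor's note): an out-of-range index
// yields NaN from charCodeAt and "" from charAt, matching the spec, so
// "abc".charCodeAt(5) is NaN while "abc".charAt(5) is "". A non-smi index
// loads a sentinel (undefined, respectively smi 0) into the result register,
// which makes the slow path redo the operation after a conversion.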
   3093 
   3094 
   3095 void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) {
   3096   ASSERT_EQ(2, args->length());
   3097 
   3098   VisitForStackValue(args->at(0));
   3099   VisitForStackValue(args->at(1));
   3100 
   3101   StringAddStub stub(NO_STRING_ADD_FLAGS);
   3102   __ CallStub(&stub);
   3103   context()->Plug(r0);
   3104 }
   3105 
   3106 
   3107 void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
   3108   ASSERT_EQ(2, args->length());
   3109 
   3110   VisitForStackValue(args->at(0));
   3111   VisitForStackValue(args->at(1));
   3112 
   3113   StringCompareStub stub;
   3114   __ CallStub(&stub);
   3115   context()->Plug(r0);
   3116 }
   3117 
   3118 
   3119 void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
   3120   // Load the argument on the stack and call the stub.
   3121   TranscendentalCacheStub stub(TranscendentalCache::SIN,
   3122                                TranscendentalCacheStub::TAGGED);
   3123   ASSERT(args->length() == 1);
   3124   VisitForStackValue(args->at(0));
   3125   __ CallStub(&stub);
   3126   context()->Plug(r0);
   3127 }
   3128 
   3129 
   3130 void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
   3131   // Load the argument on the stack and call the stub.
   3132   TranscendentalCacheStub stub(TranscendentalCache::COS,
   3133                                TranscendentalCacheStub::TAGGED);
   3134   ASSERT(args->length() == 1);
   3135   VisitForStackValue(args->at(0));
   3136   __ CallStub(&stub);
   3137   context()->Plug(r0);
   3138 }
   3139 
   3140 
   3141 void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
   3142   // Load the argument on the stack and call the stub.
   3143   TranscendentalCacheStub stub(TranscendentalCache::LOG,
   3144                                TranscendentalCacheStub::TAGGED);
   3145   ASSERT(args->length() == 1);
   3146   VisitForStackValue(args->at(0));
   3147   __ CallStub(&stub);
   3148   context()->Plug(r0);
   3149 }
   3150 
   3151 
   3152 void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
   3153   // Load the argument on the stack and call the runtime function.
   3154   ASSERT(args->length() == 1);
   3155   VisitForStackValue(args->at(0));
   3156   __ CallRuntime(Runtime::kMath_sqrt, 1);
   3157   context()->Plug(r0);
   3158 }
   3159 
   3160 
   3161 void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
   3162   ASSERT(args->length() >= 2);
   3163 
   3164   int arg_count = args->length() - 2;  // For receiver and function.
   3165   VisitForStackValue(args->at(0));  // Receiver.
   3166   for (int i = 0; i < arg_count; i++) {
   3167     VisitForStackValue(args->at(i + 1));
   3168   }
   3169   VisitForAccumulatorValue(args->at(arg_count + 1));  // Function.
   3170 
   3171   // InvokeFunction requires function in r1. Move it in there.
   3172   if (!result_register().is(r1)) __ mov(r1, result_register());
   3173   ParameterCount count(arg_count);
   3174   __ InvokeFunction(r1, count, CALL_FUNCTION);
   3175   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   3176   context()->Plug(r0);
   3177 }
   3178 
   3179 
   3180 void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
   3181   RegExpConstructResultStub stub;
   3182   ASSERT(args->length() == 3);
   3183   VisitForStackValue(args->at(0));
   3184   VisitForStackValue(args->at(1));
   3185   VisitForStackValue(args->at(2));
   3186   __ CallStub(&stub);
   3187   context()->Plug(r0);
   3188 }
   3189 
   3190 
   3191 void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
   3192   ASSERT(args->length() == 3);
   3193   VisitForStackValue(args->at(0));
   3194   VisitForStackValue(args->at(1));
   3195   VisitForStackValue(args->at(2));
   3196   Label done;
   3197   Label slow_case;
   3198   Register object = r0;
   3199   Register index1 = r1;
   3200   Register index2 = r2;
   3201   Register elements = r3;
   3202   Register scratch1 = r4;
   3203   Register scratch2 = r5;
   3204 
   3205   __ ldr(object, MemOperand(sp, 2 * kPointerSize));
   3206   // Fetch the map and check if array is in fast case.
   3207   // Check that object doesn't require security checks and
   3208   // has no indexed interceptor.
   3209   __ CompareObjectType(object, scratch1, scratch2, JS_ARRAY_TYPE);
   3210   __ b(ne, &slow_case);
   3211   // Map is now in scratch1.
   3212 
   3213   __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
   3214   __ tst(scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
   3215   __ b(ne, &slow_case);
   3216 
   3217   // Check the object's elements are in fast case and writable.
   3218   __ ldr(elements, FieldMemOperand(object, JSObject::kElementsOffset));
   3219   __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
   3220   __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
   3221   __ cmp(scratch1, ip);
   3222   __ b(ne, &slow_case);
   3223 
   3224   // Check that both indices are smis.
   3225   __ ldr(index1, MemOperand(sp, 1 * kPointerSize));
   3226   __ ldr(index2, MemOperand(sp, 0));
   3227   __ JumpIfNotBothSmi(index1, index2, &slow_case);
   3228 
   3229   // Check that both indices are valid.
   3230   __ ldr(scratch1, FieldMemOperand(object, JSArray::kLengthOffset));
   3231   __ cmp(scratch1, index1);
   3232   __ cmp(scratch1, index2, hi);
   3233   __ b(ls, &slow_case);
   3234 
   3235   // Bring the address of the elements into index1 and index2.
   3236   __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   3237   __ add(index1,
   3238          scratch1,
   3239          Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
   3240   __ add(index2,
   3241          scratch1,
   3242          Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));
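  // Index arithmetic above (editor's note): a smi stores value i as
  // i << kSmiTagSize, so with 4-byte pointers (kPointerSizeLog2 == 2,
  // kSmiTagSize == 1):
  //
  //   base + (smi_index << (kPointerSizeLog2 - kSmiTagSize))
  //     == base + ((i << 1) << 1) == base + i * kPointerSize
  //
  // For example, i == 3 is stored as 6, and 6 << 1 == 12 == 3 * 4 bytes.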
   3243 
   3244   // Swap elements.
   3245   __ ldr(scratch1, MemOperand(index1, 0));
   3246   __ ldr(scratch2, MemOperand(index2, 0));
   3247   __ str(scratch1, MemOperand(index2, 0));
   3248   __ str(scratch2, MemOperand(index1, 0));
   3249 
   3250   Label new_space;
   3251   __ InNewSpace(elements, scratch1, eq, &new_space);
   3252   // Possible optimization: do a check that both values are Smis
    3253   // (OR them together and test against the smi mask).
   3254 
   3255   __ mov(scratch1, elements);
   3256   __ RecordWriteHelper(elements, index1, scratch2);
   3257   __ RecordWriteHelper(scratch1, index2, scratch2);  // scratch1 holds elements.
   3258 
   3259   __ bind(&new_space);
   3260   // We are done. Drop elements from the stack, and return undefined.
   3261   __ Drop(3);
   3262   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   3263   __ jmp(&done);
   3264 
   3265   __ bind(&slow_case);
   3266   __ CallRuntime(Runtime::kSwapElements, 3);
   3267 
   3268   __ bind(&done);
   3269   context()->Plug(r0);
   3270 }
   3271 
   3272 
   3273 void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
   3274   ASSERT_EQ(2, args->length());
   3275 
   3276   ASSERT_NE(NULL, args->at(0)->AsLiteral());
   3277   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
   3278 
   3279   Handle<FixedArray> jsfunction_result_caches(
   3280       isolate()->global_context()->jsfunction_result_caches());
   3281   if (jsfunction_result_caches->length() <= cache_id) {
   3282     __ Abort("Attempt to use undefined cache.");
   3283     __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   3284     context()->Plug(r0);
   3285     return;
   3286   }
   3287 
   3288   VisitForAccumulatorValue(args->at(1));
   3289 
   3290   Register key = r0;
   3291   Register cache = r1;
   3292   __ ldr(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
   3293   __ ldr(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
   3294   __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
   3295   __ ldr(cache,
   3296          FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
    3297 
    3299   Label done, not_found;
   3301   ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
   3302   __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
   3303   // r2 now holds finger offset as a smi.
   3304   __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   3305   // r3 now points to the start of fixed array elements.
   3306   __ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
   3307   // Note side effect of PreIndex: r3 now points to the key of the pair.
   3308   __ cmp(key, r2);
   3309   __ b(ne, &not_found);
   3310 
   3311   __ ldr(r0, MemOperand(r3, kPointerSize));
   3312   __ b(&done);
   3313 
   3314   __ bind(&not_found);
   3315   // Call runtime to perform the lookup.
   3316   __ Push(cache, key);
   3317   __ CallRuntime(Runtime::kGetFromCache, 2);
   3318 
   3319   __ bind(&done);
   3320   context()->Plug(r0);
   3321 }
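// Cache layout assumed above (editor's sketch): a JSFunctionResultCache is a
// FixedArray of alternating key/value pairs plus a smi "finger" marking the
// most recently hit key. The fast path probes only the fingered pair:
//
//   if (cache[finger] == key) return cache[finger + 1];
//   return %GetFromCache(cache_id, key);  // Slow path; also moves the finger.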
   3322 
   3323 
   3324 void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
   3325   ASSERT_EQ(2, args->length());
   3326 
   3327   Register right = r0;
   3328   Register left = r1;
   3329   Register tmp = r2;
   3330   Register tmp2 = r3;
   3331 
   3332   VisitForStackValue(args->at(0));
   3333   VisitForAccumulatorValue(args->at(1));
   3334   __ pop(left);
   3335 
   3336   Label done, fail, ok;
   3337   __ cmp(left, Operand(right));
   3338   __ b(eq, &ok);
   3339   // Fail if either is a non-HeapObject.
   3340   __ and_(tmp, left, Operand(right));
   3341   __ tst(tmp, Operand(kSmiTagMask));
   3342   __ b(eq, &fail);
   3343   __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
   3344   __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
   3345   __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
   3346   __ b(ne, &fail);
   3347   __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
   3348   __ cmp(tmp, Operand(tmp2));
   3349   __ b(ne, &fail);
   3350   __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
   3351   __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
   3352   __ cmp(tmp, tmp2);
   3353   __ b(eq, &ok);
   3354   __ bind(&fail);
   3355   __ LoadRoot(r0, Heap::kFalseValueRootIndex);
   3356   __ jmp(&done);
   3357   __ bind(&ok);
   3358   __ LoadRoot(r0, Heap::kTrueValueRootIndex);
   3359   __ bind(&done);
   3360 
   3361   context()->Plug(r0);
   3362 }
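// Restating the checks above (editor's note): the result is true iff the
// operands are the same object, or both are heap objects with identical maps,
// the left one is a JSRegExp, and both share the same data array (which holds
// the pattern and flags):
//
//   left == right
//   || (both heap objects && IsJSRegExp(left)
//       && left->map() == right->map() && left->data() == right->data())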
   3363 
   3364 
   3365 void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
   3366   VisitForAccumulatorValue(args->at(0));
   3367 
   3368   Label materialize_true, materialize_false;
   3369   Label* if_true = NULL;
   3370   Label* if_false = NULL;
   3371   Label* fall_through = NULL;
   3372   context()->PrepareTest(&materialize_true, &materialize_false,
   3373                          &if_true, &if_false, &fall_through);
   3374 
   3375   __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
   3376   __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
   3377   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   3378   Split(eq, if_true, if_false, fall_through);
   3379 
   3380   context()->Plug(if_true, if_false);
   3381 }
   3382 
   3383 
   3384 void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
   3385   ASSERT(args->length() == 1);
   3386   VisitForAccumulatorValue(args->at(0));
   3387 
   3388   if (FLAG_debug_code) {
   3389     __ AbortIfNotString(r0);
   3390   }
   3391 
   3392   __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
   3393   __ IndexFromHash(r0, r0);
   3394 
   3395   context()->Plug(r0);
   3396 }
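// Encoding relied on above (editor's note): when a string such as "123" has
// been validated as an array index, the numeric index is cached inside the
// string's hash field; kContainsCachedArrayIndexMask tests for that state and
// IndexFromHash extracts the cached index as a smi.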
   3397 
   3398 
   3399 void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
   3400   Label bailout, done, one_char_separator, long_separator,
   3401       non_trivial_array, not_size_one_array, loop,
   3402       empty_separator_loop, one_char_separator_loop,
   3403       one_char_separator_loop_entry, long_separator_loop;
   3404 
   3405   ASSERT(args->length() == 2);
   3406   VisitForStackValue(args->at(1));
   3407   VisitForAccumulatorValue(args->at(0));
   3408 
   3409   // All aliases of the same register have disjoint lifetimes.
   3410   Register array = r0;
   3411   Register elements = no_reg;  // Will be r0.
   3412   Register result = no_reg;  // Will be r0.
   3413   Register separator = r1;
   3414   Register array_length = r2;
   3415   Register result_pos = no_reg;  // Will be r2
   3416   Register string_length = r3;
   3417   Register string = r4;
   3418   Register element = r5;
   3419   Register elements_end = r6;
   3420   Register scratch1 = r7;
   3421   Register scratch2 = r9;
   3422 
   3423   // Separator operand is on the stack.
   3424   __ pop(separator);
   3425 
   3426   // Check that the array is a JSArray.
   3427   __ JumpIfSmi(array, &bailout);
   3428   __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
   3429   __ b(ne, &bailout);
   3430 
   3431   // Check that the array has fast elements.
   3432   __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitField2Offset));
   3433   __ tst(scratch2, Operand(1 << Map::kHasFastElements));
   3434   __ b(eq, &bailout);
   3435 
   3436   // If the array has length zero, return the empty string.
   3437   __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
   3438   __ SmiUntag(array_length, SetCC);
   3439   __ b(ne, &non_trivial_array);
   3440   __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
   3441   __ b(&done);
   3442 
   3443   __ bind(&non_trivial_array);
   3444 
   3445   // Get the FixedArray containing array's elements.
   3446   elements = array;
   3447   __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
   3448   array = no_reg;  // End of array's live range.
   3449 
   3450   // Check that all array elements are sequential ASCII strings, and
   3451   // accumulate the sum of their lengths, as a smi-encoded value.
   3452   __ mov(string_length, Operand(0));
   3453   __ add(element,
   3454          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   3455   __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
   3456   // Loop condition: while (element < elements_end).
   3457   // Live values in registers:
   3458   //   elements: Fixed array of strings.
   3459   //   array_length: Length of the fixed array of strings (not smi)
   3460   //   separator: Separator string
   3461   //   string_length: Accumulated sum of string lengths (smi).
   3462   //   element: Current array element.
   3463   //   elements_end: Array end.
   3464   if (FLAG_debug_code) {
   3465     __ cmp(array_length, Operand(0));
   3466     __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
   3467   }
   3468   __ bind(&loop);
   3469   __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
   3470   __ JumpIfSmi(string, &bailout);
   3471   __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
   3472   __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
   3473   __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
   3474   __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
    3475   __ add(string_length, string_length, Operand(scratch1), SetCC);
   3476   __ b(vs, &bailout);
   3477   __ cmp(element, elements_end);
   3478   __ b(lt, &loop);
   3479 
   3480   // If array_length is 1, return elements[0], a string.
   3481   __ cmp(array_length, Operand(1));
   3482   __ b(ne, &not_size_one_array);
   3483   __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
   3484   __ b(&done);
   3485 
   3486   __ bind(&not_size_one_array);
   3487 
   3488   // Live values in registers:
   3489   //   separator: Separator string
   3490   //   array_length: Length of the array.
   3491   //   string_length: Sum of string lengths (smi).
   3492   //   elements: FixedArray of strings.
   3493 
   3494   // Check that the separator is a flat ASCII string.
   3495   __ JumpIfSmi(separator, &bailout);
   3496   __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
   3497   __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
   3498   __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
   3499 
   3500   // Add (separator length times array_length) - separator length to the
    3501   // string_length to get the length of the result string. array_length is not
    3502   // a smi but the other values are, so the result is a smi.
   3503   __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
   3504   __ sub(string_length, string_length, Operand(scratch1));
   3505   __ smull(scratch2, ip, array_length, scratch1);
   3506   // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
   3507   // zero.
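  // (Editor's note) smull writes the 64-bit product with the low word in
  // scratch2 and the high word in ip. A product of non-negative operands fits
  // a positive smi only when the high word is zero and the low word's sign
  // bit is clear, which is exactly what the two bailout checks below test.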
   3508   __ cmp(ip, Operand(0));
   3509   __ b(ne, &bailout);
   3510   __ tst(scratch2, Operand(0x80000000));
   3511   __ b(ne, &bailout);
    3512   __ add(string_length, string_length, Operand(scratch2), SetCC);
   3513   __ b(vs, &bailout);
   3514   __ SmiUntag(string_length);
   3515 
   3516   // Get first element in the array to free up the elements register to be used
   3517   // for the result.
   3518   __ add(element,
   3519          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   3520   result = elements;  // End of live range for elements.
   3521   elements = no_reg;
   3522   // Live values in registers:
   3523   //   element: First array element
   3524   //   separator: Separator string
   3525   //   string_length: Length of result string (not smi)
   3526   //   array_length: Length of the array.
   3527   __ AllocateAsciiString(result,
   3528                          string_length,
   3529                          scratch1,
   3530                          scratch2,
   3531                          elements_end,
   3532                          &bailout);
   3533   // Prepare for looping. Set up elements_end to end of the array. Set
   3534   // result_pos to the position of the result where to write the first
   3535   // character.
   3536   __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
   3537   result_pos = array_length;  // End of live range for array_length.
   3538   array_length = no_reg;
   3539   __ add(result_pos,
   3540          result,
   3541          Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
   3542 
   3543   // Check the length of the separator.
   3544   __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
   3545   __ cmp(scratch1, Operand(Smi::FromInt(1)));
   3546   __ b(eq, &one_char_separator);
   3547   __ b(gt, &long_separator);
   3548 
   3549   // Empty separator case
   3550   __ bind(&empty_separator_loop);
   3551   // Live values in registers:
   3552   //   result_pos: the position to which we are currently copying characters.
   3553   //   element: Current array element.
   3554   //   elements_end: Array end.
   3555 
   3556   // Copy next array element to the result.
   3557   __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
   3558   __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
   3559   __ SmiUntag(string_length);
   3560   __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
   3561   __ CopyBytes(string, result_pos, string_length, scratch1);
   3562   __ cmp(element, elements_end);
   3563   __ b(lt, &empty_separator_loop);  // End while (element < elements_end).
   3564   ASSERT(result.is(r0));
   3565   __ b(&done);
   3566 
   3567   // One-character separator case
   3568   __ bind(&one_char_separator);
   3569   // Replace separator with its ascii character value.
   3570   __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
   3571   // Jump into the loop after the code that copies the separator, so the first
    3572   // element is not preceded by a separator.
   3573   __ jmp(&one_char_separator_loop_entry);
   3574 
   3575   __ bind(&one_char_separator_loop);
   3576   // Live values in registers:
   3577   //   result_pos: the position to which we are currently copying characters.
   3578   //   element: Current array element.
   3579   //   elements_end: Array end.
   3580   //   separator: Single separator ascii char (in lower byte).
   3581 
   3582   // Copy the separator character to the result.
   3583   __ strb(separator, MemOperand(result_pos, 1, PostIndex));
   3584 
   3585   // Copy next array element to the result.
   3586   __ bind(&one_char_separator_loop_entry);
   3587   __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
   3588   __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
   3589   __ SmiUntag(string_length);
   3590   __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
   3591   __ CopyBytes(string, result_pos, string_length, scratch1);
   3592   __ cmp(element, elements_end);
   3593   __ b(lt, &one_char_separator_loop);  // End while (element < elements_end).
   3594   ASSERT(result.is(r0));
   3595   __ b(&done);
   3596 
   3597   // Long separator case (separator is more than one character). Entry is at the
   3598   // label long_separator below.
   3599   __ bind(&long_separator_loop);
   3600   // Live values in registers:
   3601   //   result_pos: the position to which we are currently copying characters.
   3602   //   element: Current array element.
   3603   //   elements_end: Array end.
   3604   //   separator: Separator string.
   3605 
   3606   // Copy the separator to the result.
   3607   __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
   3608   __ SmiUntag(string_length);
   3609   __ add(string,
   3610          separator,
   3611          Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
   3612   __ CopyBytes(string, result_pos, string_length, scratch1);
   3613 
   3614   __ bind(&long_separator);
   3615   __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
   3616   __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
   3617   __ SmiUntag(string_length);
   3618   __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
   3619   __ CopyBytes(string, result_pos, string_length, scratch1);
   3620   __ cmp(element, elements_end);
   3621   __ b(lt, &long_separator_loop);  // End while (element < elements_end).
   3622   ASSERT(result.is(r0));
   3623   __ b(&done);
   3624 
   3625   __ bind(&bailout);
   3626   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   3627   __ bind(&done);
   3628   context()->Plug(r0);
   3629 }
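// Overall shape of the fast path above (editor's sketch):
//
//   n = array.length;                      // Bail out unless fast elements.
//   total = sum of element lengths;        // Bail on non-seq-ASCII strings.
//   total += (n - 1) * separator.length;   // Bail on smi overflow.
//   result = AllocateAsciiString(total);
//   copy loop, specialised for separator.length == 0, == 1, and > 1;
//
// Every bailout returns undefined, which lets the JavaScript caller fall back
// to the generic join implementation.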
   3630 
   3631 
   3632 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
   3633   Handle<String> name = expr->name();
   3634   if (name->length() > 0 && name->Get(0) == '_') {
   3635     Comment cmnt(masm_, "[ InlineRuntimeCall");
   3636     EmitInlineRuntimeCall(expr);
   3637     return;
   3638   }
   3639 
   3640   Comment cmnt(masm_, "[ CallRuntime");
   3641   ZoneList<Expression*>* args = expr->arguments();
   3642 
   3643   if (expr->is_jsruntime()) {
   3644     // Prepare for calling JS runtime function.
   3645     __ ldr(r0, GlobalObjectOperand());
   3646     __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
   3647     __ push(r0);
   3648   }
   3649 
   3650   // Push the arguments ("left-to-right").
   3651   int arg_count = args->length();
   3652   for (int i = 0; i < arg_count; i++) {
   3653     VisitForStackValue(args->at(i));
   3654   }
   3655 
   3656   if (expr->is_jsruntime()) {
   3657     // Call the JS runtime function.
   3658     __ mov(r2, Operand(expr->name()));
   3659     Handle<Code> ic =
   3660         isolate()->stub_cache()->ComputeCallInitialize(arg_count, NOT_IN_LOOP);
   3661     EmitCallIC(ic, RelocInfo::CODE_TARGET);
   3662     // Restore context register.
   3663     __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   3664   } else {
   3665     // Call the C runtime function.
   3666     __ CallRuntime(expr->function(), arg_count);
   3667   }
   3668   context()->Plug(r0);
   3669 }
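// Dispatch summary for the code above (editor's note): a name starting with
// '_' (natives syntax %_IsArray(x)) expands inline via one of the Emit*
// helpers; a call resolved to a C++ runtime function becomes __ CallRuntime;
// anything else is treated as a JS runtime call, looked up by name on the
// builtins object through a call IC.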
   3670 
   3671 
   3672 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
   3673   switch (expr->op()) {
   3674     case Token::DELETE: {
   3675       Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
   3676       Property* prop = expr->expression()->AsProperty();
   3677       Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
   3678 
   3679       if (prop != NULL) {
   3680         if (prop->is_synthetic()) {
   3681           // Result of deleting parameters is false, even when they rewrite
   3682           // to accesses on the arguments object.
   3683           context()->Plug(false);
   3684         } else {
   3685           VisitForStackValue(prop->obj());
   3686           VisitForStackValue(prop->key());
   3687           __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
   3688           __ push(r1);
   3689           __ InvokeBuiltin(Builtins::DELETE, CALL_JS);
   3690           context()->Plug(r0);
   3691         }
   3692       } else if (var != NULL) {
   3693         // Delete of an unqualified identifier is disallowed in strict mode
   3694         // but "delete this" is.
   3695         ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
   3696         if (var->is_global()) {
   3697           __ ldr(r2, GlobalObjectOperand());
   3698           __ mov(r1, Operand(var->name()));
   3699           __ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
   3700           __ Push(r2, r1, r0);
   3701           __ InvokeBuiltin(Builtins::DELETE, CALL_JS);
   3702           context()->Plug(r0);
   3703         } else if (var->AsSlot() != NULL &&
   3704                    var->AsSlot()->type() != Slot::LOOKUP) {
   3705           // Result of deleting non-global, non-dynamic variables is false.
   3706           // The subexpression does not have side effects.
   3707           context()->Plug(false);
   3708         } else {
   3709           // Non-global variable.  Call the runtime to try to delete from the
   3710           // context where the variable was introduced.
   3711           __ push(context_register());
   3712           __ mov(r2, Operand(var->name()));
   3713           __ push(r2);
   3714           __ CallRuntime(Runtime::kDeleteContextSlot, 2);
   3715           context()->Plug(r0);
   3716         }
   3717       } else {
   3718         // Result of deleting non-property, non-variable reference is true.
   3719         // The subexpression may have side effects.
   3720         VisitForEffect(expr->expression());
   3721         context()->Plug(true);
   3722       }
   3723       break;
   3724     }
   3725 
   3726     case Token::VOID: {
   3727       Comment cmnt(masm_, "[ UnaryOperation (VOID)");
   3728       VisitForEffect(expr->expression());
   3729       context()->Plug(Heap::kUndefinedValueRootIndex);
   3730       break;
   3731     }
   3732 
   3733     case Token::NOT: {
   3734       Comment cmnt(masm_, "[ UnaryOperation (NOT)");
   3735       if (context()->IsEffect()) {
   3736         // Unary NOT has no side effects so it's only necessary to visit the
   3737         // subexpression.  Match the optimizing compiler by not branching.
   3738         VisitForEffect(expr->expression());
   3739       } else {
   3740         Label materialize_true, materialize_false;
   3741         Label* if_true = NULL;
   3742         Label* if_false = NULL;
   3743         Label* fall_through = NULL;
   3744 
   3745         // Notice that the labels are swapped.
   3746         context()->PrepareTest(&materialize_true, &materialize_false,
   3747                                &if_false, &if_true, &fall_through);
   3748         if (context()->IsTest()) ForwardBailoutToChild(expr);
   3749         VisitForControl(expr->expression(), if_true, if_false, fall_through);
   3750         context()->Plug(if_false, if_true);  // Labels swapped.
   3751       }
   3752       break;
   3753     }
   3754 
   3755     case Token::TYPEOF: {
   3756       Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
   3757       { StackValueContext context(this);
   3758         VisitForTypeofValue(expr->expression());
   3759       }
   3760       __ CallRuntime(Runtime::kTypeof, 1);
   3761       context()->Plug(r0);
   3762       break;
   3763     }
   3764 
   3765     case Token::ADD: {
   3766       Comment cmt(masm_, "[ UnaryOperation (ADD)");
   3767       VisitForAccumulatorValue(expr->expression());
   3768       Label no_conversion;
   3769       __ tst(result_register(), Operand(kSmiTagMask));
   3770       __ b(eq, &no_conversion);
   3771       ToNumberStub convert_stub;
   3772       __ CallStub(&convert_stub);
   3773       __ bind(&no_conversion);
   3774       context()->Plug(result_register());
   3775       break;
   3776     }
   3777 
   3778     case Token::SUB: {
   3779       Comment cmt(masm_, "[ UnaryOperation (SUB)");
   3780       bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
   3781       UnaryOverwriteMode overwrite =
   3782           can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
   3783       GenericUnaryOpStub stub(Token::SUB, overwrite, NO_UNARY_FLAGS);
   3784       // GenericUnaryOpStub expects the argument to be in the
   3785       // accumulator register r0.
   3786       VisitForAccumulatorValue(expr->expression());
   3787       __ CallStub(&stub);
   3788       context()->Plug(r0);
   3789       break;
   3790     }
   3791 
   3792     case Token::BIT_NOT: {
   3793       Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
   3794       // The generic unary operation stub expects the argument to be
   3795       // in the accumulator register r0.
   3796       VisitForAccumulatorValue(expr->expression());
   3797       Label done;
   3798       bool inline_smi_code = ShouldInlineSmiCase(expr->op());
   3799       if (inline_smi_code) {
   3800         Label call_stub;
   3801         __ JumpIfNotSmi(r0, &call_stub);
   3802         __ mvn(r0, Operand(r0));
   3803         // Bit-clear inverted smi-tag.
   3804         __ bic(r0, r0, Operand(kSmiTagMask));
   3805         __ b(&done);
   3806         __ bind(&call_stub);
   3807       }
   3808       bool overwrite = expr->expression()->ResultOverwriteAllowed();
   3809       UnaryOpFlags flags = inline_smi_code
   3810           ? NO_UNARY_SMI_CODE_IN_STUB
   3811           : NO_UNARY_FLAGS;
   3812       UnaryOverwriteMode mode =
   3813           overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
   3814       GenericUnaryOpStub stub(Token::BIT_NOT, mode, flags);
   3815       __ CallStub(&stub);
   3816       __ bind(&done);
   3817       context()->Plug(r0);
   3818       break;
   3819     }
   3820 
   3821     default:
   3822       UNREACHABLE();
   3823   }
   3824 }
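// Worked example for the inline BIT_NOT smi case above (editor's note): a smi
// stores n as n << 1, and on 32 bits:
//
//   smi(5)       == 0x0000000A
//   mvn          -> 0xFFFFFFF5   // Bitwise NOT; the tag bit is now set.
//   bic smi tag  -> 0xFFFFFFF4   // == -12 == smi(-6) == smi(~5).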
   3825 
   3826 
   3827 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
   3828   Comment cmnt(masm_, "[ CountOperation");
   3829   SetSourcePosition(expr->position());
   3830 
   3831   // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
   3832   // as the left-hand side.
   3833   if (!expr->expression()->IsValidLeftHandSide()) {
   3834     VisitForEffect(expr->expression());
   3835     return;
   3836   }
   3837 
   3838   // Expression can only be a property, a global or a (parameter or local)
   3839   // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
   3840   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
   3841   LhsKind assign_type = VARIABLE;
   3842   Property* prop = expr->expression()->AsProperty();
   3843   // In case of a property we use the uninitialized expression context
   3844   // of the key to detect a named property.
   3845   if (prop != NULL) {
   3846     assign_type =
   3847         (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
   3848   }
   3849 
   3850   // Evaluate expression and get value.
   3851   if (assign_type == VARIABLE) {
   3852     ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
   3853     AccumulatorValueContext context(this);
   3854     EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
   3855   } else {
   3856     // Reserve space for result of postfix operation.
   3857     if (expr->is_postfix() && !context()->IsEffect()) {
   3858       __ mov(ip, Operand(Smi::FromInt(0)));
   3859       __ push(ip);
   3860     }
   3861     if (assign_type == NAMED_PROPERTY) {
   3862       // Put the object both on the stack and in the accumulator.
   3863       VisitForAccumulatorValue(prop->obj());
   3864       __ push(r0);
   3865       EmitNamedPropertyLoad(prop);
   3866     } else {
   3867       if (prop->is_arguments_access()) {
   3868         VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
   3869         __ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
   3870         __ push(r0);
   3871         __ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
   3872       } else {
   3873         VisitForStackValue(prop->obj());
   3874         VisitForAccumulatorValue(prop->key());
   3875       }
   3876       __ ldr(r1, MemOperand(sp, 0));
   3877       __ push(r0);
   3878       EmitKeyedPropertyLoad(prop);
   3879     }
   3880   }
   3881 
   3882   // We need a second deoptimization point after loading the value
    3883   // in case evaluating the property load may have a side effect.
   3884   if (assign_type == VARIABLE) {
   3885     PrepareForBailout(expr->expression(), TOS_REG);
   3886   } else {
   3887     PrepareForBailoutForId(expr->CountId(), TOS_REG);
   3888   }
   3889 
   3890   // Call ToNumber only if operand is not a smi.
   3891   Label no_conversion;
   3892   __ JumpIfSmi(r0, &no_conversion);
   3893   ToNumberStub convert_stub;
   3894   __ CallStub(&convert_stub);
   3895   __ bind(&no_conversion);
   3896 
   3897   // Save result for postfix expressions.
   3898   if (expr->is_postfix()) {
   3899     if (!context()->IsEffect()) {
   3900       // Save the result on the stack. If we have a named or keyed property
   3901       // we store the result under the receiver that is currently on top
   3902       // of the stack.
   3903       switch (assign_type) {
   3904         case VARIABLE:
   3905           __ push(r0);
   3906           break;
   3907         case NAMED_PROPERTY:
   3908           __ str(r0, MemOperand(sp, kPointerSize));
   3909           break;
   3910         case KEYED_PROPERTY:
   3911           __ str(r0, MemOperand(sp, 2 * kPointerSize));
   3912           break;
   3913       }
   3914     }
   3915   }
    3916 
   3918   // Inline smi case if we are in a loop.
   3919   Label stub_call, done;
   3920   JumpPatchSite patch_site(masm_);
   3921 
   3922   int count_value = expr->op() == Token::INC ? 1 : -1;
   3923   if (ShouldInlineSmiCase(expr->op())) {
   3924     __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
   3925     __ b(vs, &stub_call);
   3926     // We could eliminate this smi check if we split the code at
   3927     // the first smi check before calling ToNumber.
   3928     patch_site.EmitJumpIfSmi(r0, &done);
   3929 
   3930     __ bind(&stub_call);
   3931     // Call stub. Undo operation first.
   3932     __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
   3933   }
   3934   __ mov(r1, Operand(Smi::FromInt(count_value)));
   3935 
   3936   // Record position before stub call.
   3937   SetSourcePosition(expr->position());
   3938 
   3939   TypeRecordingBinaryOpStub stub(Token::ADD, NO_OVERWRITE);
   3940   EmitCallIC(stub.GetCode(), &patch_site);
   3941   __ bind(&done);
   3942 
   3943   // Store the value returned in r0.
   3944   switch (assign_type) {
   3945     case VARIABLE:
   3946       if (expr->is_postfix()) {
   3947         { EffectContext context(this);
   3948           EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
   3949                                  Token::ASSIGN);
   3950           PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   3951           context.Plug(r0);
   3952         }
    3953         // For all contexts except EffectContext we have the result on
   3954         // top of the stack.
   3955         if (!context()->IsEffect()) {
   3956           context()->PlugTOS();
   3957         }
   3958       } else {
   3959         EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
   3960                                Token::ASSIGN);
   3961         PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   3962         context()->Plug(r0);
   3963       }
   3964       break;
   3965     case NAMED_PROPERTY: {
   3966       __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
   3967       __ pop(r1);
   3968       Handle<Code> ic = is_strict_mode()
   3969           ? isolate()->builtins()->StoreIC_Initialize_Strict()
   3970           : isolate()->builtins()->StoreIC_Initialize();
   3971       EmitCallIC(ic, RelocInfo::CODE_TARGET);
   3972       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   3973       if (expr->is_postfix()) {
   3974         if (!context()->IsEffect()) {
   3975           context()->PlugTOS();
   3976         }
   3977       } else {
   3978         context()->Plug(r0);
   3979       }
   3980       break;
   3981     }
   3982     case KEYED_PROPERTY: {
   3983       __ pop(r1);  // Key.
   3984       __ pop(r2);  // Receiver.
   3985       Handle<Code> ic = is_strict_mode()
   3986           ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
   3987           : isolate()->builtins()->KeyedStoreIC_Initialize();
   3988       EmitCallIC(ic, RelocInfo::CODE_TARGET);
   3989       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   3990       if (expr->is_postfix()) {
   3991         if (!context()->IsEffect()) {
   3992           context()->PlugTOS();
   3993         }
   3994       } else {
   3995         context()->Plug(r0);
   3996       }
   3997       break;
   3998     }
   3999   }
   4000 }
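// Inline count fast path above (editor's note): adding Smi::FromInt(+1) or
// Smi::FromInt(-1) to a smi is a plain 32-bit add of +/-2 with SetCC, e.g.
// smi(3) + smi(1) is 6 + 2 == 8 == smi(4). On signed overflow (vs), or when
// the operand was not a smi, the speculative add is undone and the
// TypeRecordingBinaryOpStub performs the generic Token::ADD.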
   4001 
   4002 
   4003 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
   4004   ASSERT(!context()->IsEffect());
   4005   ASSERT(!context()->IsTest());
   4006   VariableProxy* proxy = expr->AsVariableProxy();
   4007   if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
   4008     Comment cmnt(masm_, "Global variable");
   4009     __ ldr(r0, GlobalObjectOperand());
   4010     __ mov(r2, Operand(proxy->name()));
   4011     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   4012     // Use a regular load, not a contextual load, to avoid a reference
   4013     // error.
   4014     EmitCallIC(ic, RelocInfo::CODE_TARGET);
   4015     PrepareForBailout(expr, TOS_REG);
   4016     context()->Plug(r0);
   4017   } else if (proxy != NULL &&
   4018              proxy->var()->AsSlot() != NULL &&
   4019              proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
   4020     Label done, slow;
   4021 
   4022     // Generate code for loading from variables potentially shadowed
   4023     // by eval-introduced variables.
   4024     Slot* slot = proxy->var()->AsSlot();
   4025     EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
   4026 
   4027     __ bind(&slow);
   4028     __ mov(r0, Operand(proxy->name()));
   4029     __ Push(cp, r0);
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(r0);
  } else {
    // This expression cannot throw a reference error at the top level.
    context()->HandleExpression(expr);
  }
}


bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
                                          Expression* left,
                                          Expression* right,
                                          Label* if_true,
                                          Label* if_false,
                                          Label* fall_through) {
  if (op != Token::EQ && op != Token::EQ_STRICT) return false;

  // Check for the pattern: typeof <expression> == <string literal>.
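  // For example, typeof x == "number" can be compiled to direct type
  // checks on the value, without materializing the typeof string.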
  Literal* right_literal = right->AsLiteral();
  if (right_literal == NULL) return false;
  Handle<Object> right_literal_value = right_literal->handle();
  if (!right_literal_value->IsString()) return false;
  UnaryOperation* left_unary = left->AsUnaryOperation();
  if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
  Handle<String> check = Handle<String>::cast(right_literal_value);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(left_unary->expression());
  }
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_symbol())) {
    __ JumpIfSmi(r0, if_true);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_symbol())) {
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => false.
    __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
    __ b(ge, if_false);
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    __ b(eq, if_true);
    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
    __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ b(eq, if_true);
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => true.
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_symbol())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r1, r0, FIRST_FUNCTION_CLASS_TYPE);
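    // Instance types from FIRST_FUNCTION_CLASS_TYPE upwards answer
    // "function" to typeof.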
    Split(ge, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_symbol())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    __ b(eq, if_true);
    // Check for JS objects => true.
    __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE);
    __ b(lo, if_false);
    __ CompareInstanceType(r0, r1, FIRST_FUNCTION_CLASS_TYPE);
    __ b(hs, if_false);
    // Check for undetectable objects => false.
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }

  return true;
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  Token::Value op = expr->op();
  Expression* left = expr->left();
  Expression* right = expr->right();
  if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
    context()->Plug(if_true, if_false);
    return;
  }

  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_JS);
      PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(r0, ip);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
      // The stub returns 0 for true.
      __ tst(r0, r0);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = eq;
      bool strict = false;
      switch (op) {
        case Token::EQ_STRICT:
          strict = true;
          // Fall through
        case Token::EQ:
          cond = eq;
          __ pop(r1);
          break;
        case Token::LT:
          cond = lt;
          __ pop(r1);
          break;
        case Token::GT:
          // Reverse left and right sides to obtain ECMA-262 conversion order.
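          // For example, "a > b" is evaluated as "b < a", with the
          // operands swapped in r1 and r0.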
          cond = lt;
          __ mov(r1, result_register());
          __ pop(r0);
          break;
        case Token::LTE:
          // Reverse left and right sides to obtain ECMA-262 conversion order.
          cond = ge;
          __ mov(r1, result_register());
          __ pop(r0);
          break;
        case Token::GTE:
          cond = ge;
          __ pop(r1);
          break;
        case Token::IN:
        case Token::INSTANCEOF:
        default:
          UNREACHABLE();
      }

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
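        // If either operand is not a smi, the tag bit of the or-ed value
        // is set, so a single smi check covers both operands.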
        __ orr(r2, r0, Operand(r1));
        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
        __ cmp(r1, r0);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(op);
      EmitCallIC(ic, &patch_site);
      PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
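      // The compare IC returns a value whose comparison against zero
      // under cond reproduces the comparison of the operands.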
      __ cmp(r0, Operand(0));
      Split(cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into the form expected by this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
  Comment cmnt(masm_, "[ CompareToNull");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(expr->expression());
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  __ LoadRoot(r1, Heap::kNullValueRootIndex);
  __ cmp(r0, r1);
  if (expr->is_strict()) {
    Split(eq, if_true, if_false, fall_through);
  } else {
    __ b(eq, if_true);
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, if_true);
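    // A smi is neither null nor undefined, and cannot be undetectable.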
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, if_false);
    // It can be an undetectable object.
    __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
    __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
    __ cmp(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(r0);
}


Register FullCodeGenerator::result_register() {
  return r0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
  ASSERT(mode == RelocInfo::CODE_TARGET ||
         mode == RelocInfo::CODE_TARGET_CONTEXT);
  Counters* counters = isolate()->counters();
  switch (ic->kind()) {
    case Code::LOAD_IC:
      __ IncrementCounter(counters->named_load_full(), 1, r1, r2);
      break;
    case Code::KEYED_LOAD_IC:
      __ IncrementCounter(counters->keyed_load_full(), 1, r1, r2);
      break;
    case Code::STORE_IC:
      __ IncrementCounter(counters->named_store_full(), 1, r1, r2);
      break;
    case Code::KEYED_STORE_IC:
      __ IncrementCounter(counters->keyed_store_full(), 1, r1, r2);
      break;
    default:
      break;
  }

  __ Call(ic, mode);
}


void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
  Counters* counters = isolate()->counters();
  switch (ic->kind()) {
    case Code::LOAD_IC:
      __ IncrementCounter(counters->named_load_full(), 1, r1, r2);
      break;
    case Code::KEYED_LOAD_IC:
      __ IncrementCounter(counters->keyed_load_full(), 1, r1, r2);
      break;
    case Code::STORE_IC:
      __ IncrementCounter(counters->named_store_full(), 1, r1, r2);
      break;
    case Code::KEYED_STORE_IC:
      __ IncrementCounter(counters->keyed_store_full(), 1, r1, r2);
      break;
    default:
      break;
  }

  __ Call(ic, RelocInfo::CODE_TARGET);
  if (patch_site != NULL && patch_site->is_bound()) {
    patch_site->EmitPatchInfo();
  } else {
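    // A nop after the call tells the IC patching code that there is no
    // inlined smi code to enable or disable.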
    __ nop();  // Signals no inlined code.
  }
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ldr(dst, ContextOperand(cp, context_index));
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(r1));
  // Store the result register while executing the finally block.
  __ push(result_register());
  // Cook the return address in the link register onto the stack as a
  // smi-encoded delta from the code object.
  __ sub(r1, lr, Operand(masm_->CodeObject()));
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  ASSERT_EQ(0, kSmiTag);
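  // With a zero tag and a one-bit tag size, adding the value to itself
  // (a one-bit left shift) yields the smi encoding.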
  __ add(r1, r1, Operand(r1));  // Convert to smi.
  __ push(r1);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(r1));
  // Restore the cooked return address from the stack.
  __ pop(r1);
  // Restore the result register from the stack.
  __ pop(result_register());
  // Uncook the return address and return.
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ mov(r1, Operand(r1, ASR, 1));  // Un-smi-tag value.
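  // Rebase the delta on the code object; writing the result to pc jumps
  // to the cooked return address.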
  __ add(pc, r1, Operand(masm_->CodeObject()));
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM