// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_MIPS

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.
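//
// For example (an illustrative sketch, not a line from this file):
//   __ mov(a0, result_register());  // Move the previous result from v0 ...
//   CallIC(ic, expr->id());         // ... so the callee sees it in a0; the
//                                   // new result again arrives in v0.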

#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"
#include "src/stub-cache.h"

#include "src/mips/code-stubs-mips.h"
#include "src/mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code that can be patched. This class has
// a number of methods to emit the patchable code and the method EmitPatchInfo
// to record a marker back to the patchable code. The marker is an
// andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy (the raw
// 16-bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (its destination is zero_reg)
// and will never be emitted by normal code.
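// For example (illustrative values only): a marker of
//   andi zero_reg, zero_reg, 0x25
// encodes a delta of 0 * 0x0000ffff + 0x25 = 37, i.e. the patchable code
// starts 37 instructions before the marker; see EmitPatchInfo() below for
// how the register and immediate are derived from the delta.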
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
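
// Typical usage (see VisitSwitchStatement below): bracket an inlined smi
// comparison with EmitJumpIfNotSmi(), then call EmitPatchInfo() immediately
// after the CompareIC call so the IC can later locate and patch the site.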


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ lw(at, MemOperand(sp, receiver_offset));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&ok, ne, a2, Operand(at));

    __ lw(a2, GlobalObjectOperand());
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));

    __ sw(a2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Subu(t5, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t5, Operand(a2));
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sw(t5, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Subu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Subu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sw(t5, MemOperand(sp, i * kPointerSize));
      }
    }
  }
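  // For example (illustrative): with locals_count == 70 and kMaxPushes == 32,
  // the loop above runs 70 / 32 == 2 iterations (initializing 64 slots) and
  // the remainder path then stores the remaining 70 % 32 == 6 slots.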

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(a1);
      __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                                 (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Addu(a2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(at, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(at));
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_,
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  ASSERT(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt, so we
  // need to make sure its size is constant. Branch may emit a skip-or-jump
  // sequence instead of the normal Branch; the "skip" part of that sequence
  // is about as long as this Branch would be, so it is safe to ignore that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  ASSERT(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
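  // For example (illustrative): a back edge 4 * kCodeSizeMultiplier bytes
  // away gets a weight of 4 (capped at kMaxBackEdgeWeight), so larger loop
  // bodies drain the interrupt budget faster.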
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ Branch(&ok, ge, a3, Operand(zero_reg));
    __ push(v0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(v0);
    EmitProfilingCounterReset();
    __ bind(&ok);

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the trampoline pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting, as we rely on the code size here.
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value, as the following branch can clobber 'at' in long branch
  // mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
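  // The ToBoolean IC leaves its result in v0, with zero meaning false, so
  // compare v0 against zero to decide which way to branch.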
  __ mov(at, zero_reg);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}
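
// For example, DoTest() above calls Split(ne, v0, Operand(at), ...); when the
// false label is the fall-through, only a single branch to if_true is emitted
// and the false case simply falls through to the next instruction.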


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
        a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
        a1, Operand(t0));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ li(a1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
      } else {
        ASSERT(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
        __ Push(cp, a2, a1, a0);
      }
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ li(a1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, a2, a1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ lw(a1, ContextOperand(a1, variable->interface()->Index()));
  __ lw(a1, ContextOperand(a1, Context::EXTENSION_INDEX));

  // Assign it.
  __ sw(a1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            a1,
                            a3,
                            kRAHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as the final fall-through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
  __ mov(a0, v0);
  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ li(a1, FeedbackVector());
  __ li(a2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
  __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(slot)));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.
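
  // Stack layout at this point (matching the loads in the loop below):
  //   sp[0] : current index (smi)
  //   sp[1] : array length (smi)
  //   sp[2] : fixed array of keys, or the enum cache
  //   sp[3] : expected map, or smi 1 (slow check) / smi 0 (proxy)
  //   sp[4] : the enumerable object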

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
  __ lw(a3, MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  ASSERT_EQ(Smi::FromInt(0), 0);
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
   1249   __ bind(loop_statement.continue_label());
   1250   __ pop(a0);
   1251   __ Addu(a0, a0, Operand(Smi::FromInt(1)));
   1252   __ push(a0);
   1253 
   1254   EmitBackEdgeBookkeeping(stmt, &loop);
   1255   __ Branch(&loop);
   1256 
   1257   // Remove the pointers stored on the stack.
   1258   __ bind(loop_statement.break_label());
   1259   __ Drop(5);
   1260 
   1261   // Exit and decrement the loop depth.
   1262   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
   1263   __ bind(&exit);
   1264   decrement_loop_depth();
   1265 }
   1266 
   1267 
   1268 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
   1269   Comment cmnt(masm_, "[ ForOfStatement");
   1270   SetStatementPosition(stmt);
   1271 
   1272   Iteration loop_statement(this, stmt);
   1273   increment_loop_depth();
   1274 
   1275   // var iterable = subject
   1276   VisitForAccumulatorValue(stmt->assign_iterable());
   1277   __ mov(a0, v0);
   1278 
   1279   // As with for-in, skip the loop if the iterator is null or undefined.
   1280   __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
   1281   __ Branch(loop_statement.break_label(), eq, a0, Operand(at));
   1282   __ LoadRoot(at, Heap::kNullValueRootIndex);
   1283   __ Branch(loop_statement.break_label(), eq, a0, Operand(at));
   1284 
   1285   // var iterator = iterable[Symbol.iterator]();
   1286   VisitForEffect(stmt->assign_iterator());
   1287 
   1288   // Loop entry.
   1289   __ bind(loop_statement.continue_label());
   1290 
   1291   // result = iterator.next()
   1292   VisitForEffect(stmt->next_result());
   1293 
   1294   // if (result.done) break;
   1295   Label result_not_done;
   1296   VisitForControl(stmt->result_done(),
   1297                   loop_statement.break_label(),
   1298                   &result_not_done,
   1299                   &result_not_done);
   1300   __ bind(&result_not_done);
   1301 
   1302   // each = result.value
   1303   VisitForEffect(stmt->assign_each());
   1304 
   1305   // Generate code for the body of the loop.
   1306   Visit(stmt->body());
   1307 
   1308   // Check stack before looping.
   1309   PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
   1310   EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
   1311   __ jmp(loop_statement.continue_label());
   1312 
   1313   // Exit and decrement the loop depth.
   1314   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
   1315   __ bind(loop_statement.break_label());
   1316   decrement_loop_depth();
   1317 }


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(),
                            info->strict_mode(),
                            info->is_generator());
    __ li(a2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kHiddenNewClosure, 3);
  }
  context()->Plug(v0);
}
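

// For illustration (hypothetical code, not from a test): a nested
// function with no literals of its own takes the FastNewClosureStub path
// above, while one containing a literal falls back to the runtime call.
//
//   function outer() {
//     return function inner(x) { return x + 1; };   // stub path
//     // return function() { return [1, 2]; };      // runtime path
//   }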


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      // Load next context in chain.
      __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(t0));
    // Check that extension is NULL.
    __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ Branch(slow, ne, temp, Operand(zero_reg));
    // Load next context in chain.
    __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  __ lw(a0, GlobalObjectOperand());
  __ li(a2, Operand(var->name()));
  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}
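

// Why the extension checks above matter, sketched with hypothetical code
// (not from the original source): a sloppy-mode direct eval can extend a
// context at runtime with a binding that shadows a global.
//
//   function f() {
//     eval("var x = 1");  // installs x on f's context extension
//     return x;           // cannot be compiled as a plain global load
//   }
//
// So every context on the chain whose scope calls sloppy eval has its
// extension slot checked for NULL before falling through to the load IC.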


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = t0;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ Branch(slow, ne, temp, Operand(zero_reg));

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
      } else {  // LET || CONST
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      }
    }
    __ Branch(done);
  }
}
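

// A sketch of the two fast cases (hypothetical code, for illustration):
//
//   var x = 1;
//   function f() { eval(s); return x; }              // DYNAMIC_GLOBAL:
//     // x is almost certainly the global x, so check the context
//     // extensions and fall through to a global load IC.
//
//   function g() { var y = 1; eval(s); return y; }   // DYNAMIC_LOCAL:
//     // y is almost certainly g's own y, so check extensions and load
//     // the known context slot directly.
//
// Either way, the slow label falls back to a full runtime lookup.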


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      // Use inline caching. Variable name is passed in a2 and the global
      // object (receiver) in a0.
      __ lw(a0, GlobalObjectOperand());
      __ li(a2, Operand(var->name()));
      CallLoadIC(CONTEXTUAL);
      context()->Plug(v0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have a valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(v0, var);
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
          __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ Branch(&done, ne, at, Operand(zero_reg));
            __ li(a0, Operand(var->name()));
            __ push(a0);
            __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            ASSERT(var->mode() == CONST_LEGACY);
            __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
            __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
          }
          context()->Plug(v0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1);  // Context and name.
      __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(v0);
    }
  }
}
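

// The read barrier above is what implements the temporal dead zone: a
// let/const slot holds TheHole until its initializer has run. A sketch of
// the two outcomes (hypothetical code, for illustration):
//
//   { f(); let x = 1; function f() { return x; } }
//     // f() reads TheHole -> kHiddenThrowReferenceError
//
//   if (false) { const x; }; var y = x;   // legacy const, never initialized
//     // the conditional move turns TheHole into undefined instead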


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // t1 = materialized value (RegExp literal)
  // t0 = JS function, literals array
  // a3 = literal index
  // a2 = RegExp pattern
  // a1 = RegExp flags
  // a0 = RegExp literal clone
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ lw(t1, FieldMemOperand(t0, literal_offset));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  // Create regexp literal using runtime function.
  // Result will be in v0.
  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a2, Operand(expr->pattern()));
  __ li(a1, Operand(expr->flags()));
  __ Push(t0, a3, a2, a1);
  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
  __ mov(t1, v0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ li(a0, Operand(Smi::FromInt(size)));
  __ Push(t1, a0);
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ pop(t1);

  __ bind(&allocated);

  // After this, registers are used as follows:
  // v0: Newly allocated regexp.
  // t1: Materialized regexp.
  // a2: temp.
  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
  context()->Plug(v0);
}
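

// The literals-array check above exists because a regexp literal is
// materialized at most once per closure and then shallow-copied on every
// evaluation. An illustrative sketch (hypothetical code):
//
//   function f() { return /ab+c/g; }
//   f(); f();
//     // 1st call: slot is undefined -> kHiddenMaterializeRegExpLiteral
//     // 2nd call: slot holds the JSRegExp -> just Allocate + CopyFields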


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ push(a1);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ li(a0, Operand(Smi::FromInt(flags)));
  int properties_count = constant_properties->length() / 2;
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
  } else {
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(a0, result_register());
            __ li(a2, Operand(key->value()));
            __ lw(a1, MemOperand(sp));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ li(a0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes.
          __ push(a0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ lw(a0, MemOperand(sp));
        __ push(a0);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
    __ push(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ li(a0, Operand(Smi::FromInt(NONE)));
    __ push(a0);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ lw(a0, MemOperand(sp));
    __ push(a0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}
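

// The accessor table batches a getter/setter pair for the same key into a
// single runtime call. Illustrative sketch (hypothetical code):
//
//   var o = { get p() { return 1; }, set p(v) { } };
//     // one kDefineOrRedefineAccessorProperty call defines both,
//     // rather than one runtime call per accessor.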


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements =
      IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ mov(a0, result_register());
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
  __ li(a1, Operand(constant_elements));
  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
    __ li(a0, Operand(Smi::FromInt(flags)));
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(v0);  // array literal
      __ Push(Smi::FromInt(expr->literal_index()));
      result_saved = true;
    }

    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ lw(t2, MemOperand(sp, kPointerSize));  // Copy of array literal.
      __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
      __ sw(result_register(), FieldMemOperand(a1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(a1, offset, result_register(), a2,
                          kRAHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      __ li(a3, Operand(Smi::FromInt(i)));
      __ mov(a0, result_register());
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }
  if (result_saved) {
    __ Pop();  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}
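

// Only the non-constant subexpressions get store code. An illustrative
// sketch (hypothetical code):
//
//   var a = [1, 2, f()];
//     // 1 and 2 arrive with the cloned boilerplate; only f() is
//     // evaluated here and written back (direct store plus write barrier
//     // on fast object elements, StoreArrayLiteralElementStub otherwise).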


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  ASSERT(expr->target()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ Assignment");

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the accumulator.
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY:
      // We need the key and receiver on both the stack and in v0 and a1.
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        __ lw(a1, MemOperand(sp, 0));
        __ push(v0);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(v0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(v0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
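

// A compound assignment is compiled as load + binary op + store.
// Illustrative sketch (hypothetical code):
//
//   o.p += 1;
//     // push o; named load of o.p; push left operand; evaluate 1;
//     // inline smi add (or BinaryOpICStub); named store back to o.p.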


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this.  It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::SUSPEND:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::INITIAL: {
      Label suspend, continuation, post_runtime, resume;

      __ jmp(&suspend);

      __ bind(&continuation);
      __ jmp(&resume);

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      __ li(a1, Operand(Smi::FromInt(continuation.pos())));
      __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
      __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
      __ mov(a1, cp);
      __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
                          kRAHasBeenSaved, kDontSaveFPRegs);
      __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
      __ Branch(&post_runtime, eq, sp, Operand(a1));
      __ push(v0);  // generator object
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);
      __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::FINAL: {
      VisitForAccumulatorValue(expr->generator_object());
      __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
      __ sw(a1, FieldMemOperand(result_register(),
                                JSGeneratorObject::kContinuationOffset));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::DELEGATING: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      // Initial send value is undefined.
      __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
      __ Branch(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      __ mov(a0, v0);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ LoadRoot(a2, Heap::kthrow_stringRootIndex);  // "throw"
      __ lw(a3, MemOperand(sp, 1 * kPointerSize));    // iter
      __ Push(a2, a3, a0);                            // "throw", iter, except
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(a0);                                        // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ push(a0);                                       // result
      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ mov(a0, v0);
      __ jmp(&l_resume);
      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ lw(a0, MemOperand(sp, generator_object_depth));
      __ push(a0);                                       // g
      ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
      __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
      __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
      __ mov(a1, cp);
      __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
                          kRAHasBeenSaved, kDontSaveFPRegs);
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ pop(v0);                                      // result
      EmitReturnSequence();
      __ mov(a0, v0);
      __ bind(&l_resume);                              // received in a0
      __ PopTryHandler();

      // receiver = iter; f = 'next'; arg = received;
      __ bind(&l_next);
      __ LoadRoot(a2, Heap::knext_stringRootIndex);    // "next"
      __ lw(a3, MemOperand(sp, 1 * kPointerSize));     // iter
      __ Push(a2, a3, a0);                             // "next", iter, received

      // result = receiver[f](arg);
      __ bind(&l_call);
      __ lw(a1, MemOperand(sp, kPointerSize));
      __ lw(a0, MemOperand(sp, 2 * kPointerSize));
      Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
      CallIC(ic, TypeFeedbackId::None());
      __ mov(a0, v0);
      __ mov(a1, a0);
      __ sw(a1, MemOperand(sp, 2 * kPointerSize));
      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ bind(&l_loop);
      __ mov(a0, v0);
      __ push(a0);                                       // save result
      __ LoadRoot(a2, Heap::kdone_stringRootIndex);      // "done"
      CallLoadIC(NOT_CONTEXTUAL);                        // result.done in v0
      __ mov(a0, v0);
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ Branch(&l_try, eq, v0, Operand(zero_reg));

      // result.value
      __ pop(a0);                                        // result
      __ LoadRoot(a2, Heap::kvalue_stringRootIndex);     // "value"
      CallLoadIC(NOT_CONTEXTUAL);                        // result.value in v0
      context()->DropAndPlug(2, v0);                     // drop iter and g
      break;
    }
  }
}
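

// The DELEGATING case compiles yield* as the loop sketched in the labels
// above, roughly (a hypothetical desugaring, not the actual parser
// output):
//
//   var received = undefined;
//   for (;;) {
//     var result = iter.next(received);   // or iter['throw'](e) when a
//                                         // throw arrives via l_catch
//     if (result.done) break;
//     received = yield result;            // re-yielded without re-boxing
//   }
//   // result.value is the value of the whole yield* expression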


void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in a0, and is ultimately read by the resumed generator, as
  // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // a1 will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(a1);

  // Check generator state.
  Label wrong_state, closed_state, done;
  __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
  __ Branch(&closed_state, eq, a3, Operand(zero_reg));
  __ Branch(&wrong_state, lt, a3, Operand(zero_reg));

  // Load suspended function and context.
  __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  // Load receiver and store as the first argument.
  __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ push(a2);

  // Push holes for the rest of the arguments to the generator function.
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ Subu(a3, a3, Operand(Smi::FromInt(1)));
  __ Branch(&push_frame, lt, a3, Operand(zero_reg));
  __ push(a2);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame;
  __ bind(&push_frame);
  __ Call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  // ra = return address.
  // fp = caller's frame pointer.
  // cp = callee's context,
  // t0 = callee's JS function.
  __ Push(ra, fp, cp, t0);
  // Adjust FP to point to saved FP.
  __ Addu(fp, sp, 2 * kPointerSize);

  // Load the operand stack size.
  __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
  __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
  __ SmiUntag(a3);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
    __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset));
    __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(a2);
    __ Addu(a3, a3, Operand(a2));
    __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ Jump(a3);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ Subu(a3, a3, Operand(1));
  __ Branch(&call_resume, lt, a3, Operand(zero_reg));
  __ push(a2);
  __ Branch(&push_operand_holes);
  __ bind(&call_resume);
  ASSERT(!result_register().is(a1));
  __ Push(a1, result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ stop("not-reached");

  // Reach here when generator is closed.
  __ bind(&closed_state);
  if (resume_mode == JSGeneratorObject::NEXT) {
    // Return completed iterator result when generator is closed.
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ push(a2);
    // Pop value from top-of-stack slot; box result into result register.
    EmitCreateIteratorResult(true);
  } else {
    // Throw the provided value.
    __ push(a0);
    __ CallRuntime(Runtime::kHiddenThrow, 1);
  }
  __ jmp(&done);

  // Throw error if we attempt to operate on a running generator.
  __ bind(&wrong_state);
  __ push(a1);
  __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);

  __ bind(&done);
  context()->Plug(result_register());
}


void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  Handle<Map> map(isolate()->native_context()->iterator_result_map());

  __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&gc_required);
  __ Push(Smi::FromInt(map->instance_size()));
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ lw(context_register(),
        MemOperand(fp, StandardFrameConstants::kContextOffset));

  __ bind(&allocated);
  __ li(a1, Operand(map));
  __ pop(a2);
  __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
  __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sw(a2,
        FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
  __ sw(a3,
        FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));

  // Only the value field needs a write barrier, as the other values are in the
  // root set.
  __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
                      a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
}
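

// The object built above is the standard iterator result. Its shape, for
// illustration:
//
//   { value: <popped TOS>, done: <true|false> }
//
// The map, properties and elements written here all come from the root
// set or the factory, so only the value field needs the write barrier
// emitted above.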


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ mov(a0, result_register());
  __ li(a2, Operand(key->value()));
  // Call load IC. It has arguments receiver and property name in a0 and a2.
  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  __ mov(a0, result_register());
  // Call keyed load IC. It has arguments key and receiver in a0 and a1.
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, prop->PropertyFeedbackId());
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = a2;
  Register scratch2 = a3;

  // Get the arguments.
  Register left = a1;
  Register right = a0;
  __ pop(left);
  __ mov(a0, result_register());

  // Perform combined smi check on both operands.
  __ Or(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  BinaryOpICStub stub(isolate(), op, mode);
  CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub.
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ srav(right, left, scratch1);
      __ And(v0, right, Operand(~kSmiTagMask));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ sllv(scratch1, scratch1, scratch2);
      __ Addu(scratch2, scratch1, Operand(0x40000000));
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ srlv(scratch1, scratch1, scratch2);
      __ And(scratch2, scratch1, 0xc0000000);
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
      __ SmiTag(v0, scratch1);
      break;
    }
    case Token::ADD:
      __ AdduAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::SUB:
      __ SubuAndCheckForOverflow(v0, left, right, scratch1);
      __ BranchOnOverflow(&stub_call, scratch1);
      break;
    case Token::MUL: {
      __ SmiUntag(scratch1, right);
      __ Mult(left, scratch1);
      __ mflo(scratch1);
      __ mfhi(scratch2);
      __ sra(scratch1, scratch1, 31);
      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
      __ mflo(v0);
      __ Branch(&done, ne, v0, Operand(zero_reg));
      __ Addu(scratch2, right, left);
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
      ASSERT(Smi::FromInt(0) == 0);
      __ mov(v0, zero_reg);
      break;
    }
    case Token::BIT_OR:
      __ Or(v0, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ And(v0, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ Xor(v0, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(v0);
}
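

// With kSmiTag == 0 and a one-bit tag, a smi is its integer value times 2,
// so tagged addition and subtraction work directly on the tagged words and
// only overflow needs checking. A worked example (32-bit values in hex):
//
//   3 -> 0x00000006, 4 -> 0x00000008; 0x6 + 0x8 = 0xe, which is smi 7.
//
// MUL untags one operand first, since (2a) * (2b) would yield 4ab rather
// than the tagged product 2ab.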


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ mov(a0, result_register());
  __ pop(a1);
  BinaryOpICStub stub(isolate(), op, mode);
  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
  CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAssignment(Expression* expr) {
  ASSERT(expr->IsValidReferenceExpression());

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(a1, result_register());
      __ pop(a0);  // Restore value.
      __ li(a2, Operand(prop->key()->AsLiteral()->value()));
      CallStoreIC();
      break;
    }
    case KEYED_PROPERTY: {
      __ push(result_register());  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(a1, result_register());
      __ Pop(a0, a2);  // a0 = restored value.
      Handle<Code> ic = strict_mode() == SLOPPY
        ? isolate()->builtins()->KeyedStoreIC_Initialize()
        : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ sw(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ Move(a3, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::EmitCallStoreContextSlot(
    Handle<String> name, StrictMode strict_mode) {
  __ li(a1, Operand(name));
  __ li(a0, Operand(Smi::FromInt(strict_mode)));
  __ Push(v0, cp, a1, a0);  // Value, context, name, strict mode.
  __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(a0, result_register());
    __ li(a2, Operand(var->name()));
    __ lw(a1, GlobalObjectOperand());
    CallStoreIC();

  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    ASSERT(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ li(a0, Operand(var->name()));
      __ Push(v0, cp, a0);  // Value, context and name.
      __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, a1);
      __ lw(a2, location);
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ Branch(&skip, ne, a2, Operand(at));
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      EmitCallStoreContextSlot(var->name(), strict_mode());
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label assign;
      MemOperand location = VarOperand(var, a1);
      __ lw(a3, location);
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
      __ Branch(&assign, ne, a3, Operand(t0));
      __ li(a3, Operand(var->name()));
      __ push(a3);
      __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      // Perform the assignment.
      __ bind(&assign);
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    // Assignment to var or initializing assignment to let/const
    // in harmony mode.
    if (var->IsLookupSlot()) {
      EmitCallStoreContextSlot(var->name(), strict_mode());
    } else {
      ASSERT((var->IsStackAllocated() || var->IsContextSlot()));
      MemOperand location = VarOperand(var, a1);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ lw(a2, location);
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  }
  // Non-initializing assignments to consts are ignored.
}
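

// A sketch of the cases above (hypothetical code, for illustration):
//
//   x = 1;                // unallocated: global store IC
//   const x = 1;          // INIT_CONST_LEGACY: store only if still TheHole
//   let y; y = 1;         // LET, non-init: hole check, then store
//   var z = 1;            // plain var: unconditional store (plus write
//                         // barrier if the slot lives in a context)
//   const w = 1; w = 2;   // non-initializing const store: silently ignored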
   2526 
   2527 
   2528 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
   2529   // Assignment to a property, using a named store IC.
   2530   Property* prop = expr->target()->AsProperty();
   2531   ASSERT(prop != NULL);
   2532   ASSERT(prop->key()->IsLiteral());
   2533 
   2534   // Record source code position before IC call.
   2535   SetSourcePosition(expr->position());
   2536   __ mov(a0, result_register());  // Load the value.
   2537   __ li(a2, Operand(prop->key()->AsLiteral()->value()));
   2538   __ pop(a1);
   2539 
   2540   CallStoreIC(expr->AssignmentFeedbackId());
   2541 
   2542   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   2543   context()->Plug(v0);
   2544 }
   2545 
   2546 
   2547 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   2548   // Assignment to a property, using a keyed store IC.
   2549 
   2550   // Record source code position before IC call.
   2551   SetSourcePosition(expr->position());
   2552   // Call keyed store IC.
   2553   // The arguments are:
   2554   // - a0 is the value,
   2555   // - a1 is the key,
   2556   // - a2 is the receiver.
   2557   __ mov(a0, result_register());
   2558   __ Pop(a2, a1);  // a1 = key.
   2559 
   2560   Handle<Code> ic = strict_mode() == SLOPPY
   2561       ? isolate()->builtins()->KeyedStoreIC_Initialize()
   2562       : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
   2563   CallIC(ic, expr->AssignmentFeedbackId());
   2564 
   2565   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   2566   context()->Plug(v0);
   2567 }
   2568 
   2569 
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(v0);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ pop(a1);
    EmitKeyedPropertyLoad(expr);
    context()->Plug(v0);
  }
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId id) {
  ic_total_count_++;
  __ Call(code, RelocInfo::CODE_TARGET, id);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  CallIC::CallType call_type = callee->IsVariableProxy()
      ? CallIC::FUNCTION
      : CallIC::METHOD;

  // Get the target function.
  if (call_type == CallIC::FUNCTION) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ Push(isolate()->factory()->undefined_value());
  } else {
    // Load the function from the receiver.
    ASSERT(callee->IsProperty());
    __ lw(v0, MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ lw(at, MemOperand(sp, 0));
    __ push(at);
    __ sw(v0, MemOperand(sp, kPointerSize));
  }

  EmitCall(expr, call_type);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  ASSERT(callee->IsProperty());
  __ lw(a1, MemOperand(sp, 0));
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ lw(at, MemOperand(sp, 0));
  __ push(at);
  __ sw(v0, MemOperand(sp, kPointerSize));

  EmitCall(expr, CallIC::METHOD);
}


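// Common code for the final step of a call: the callee and the receiver are
// already on the stack. After the arguments are pushed, the stack layout is
// (kPointerSize == 4 on mips32):
//   sp[0]                   : last argument
//   ...
//   sp[(arg_count - 1) * 4] : first argument
//   sp[arg_count * 4]       : receiver
//   sp[(arg_count + 1) * 4] : callee
// which is why the callee is reloaded into a1 from
// sp + (arg_count + 1) * kPointerSize before invoking the call IC.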
void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position of the IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic = CallIC::initialize_stub(
      isolate(), arg_count, call_type);
  __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);
  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, v0);
}


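// A direct call to eval, e.g. eval("x + 1") as opposed to
// var e = eval; e("x + 1"), must be able to see the caller's scope. The
// helper below therefore hands the runtime everything it needs to decide
// whether the callee really is the builtin eval and, if so, to compile the
// source in the calling scope: together with the copy of the function
// pushed by the caller, it pushes the first argument (the source), the
// enclosing receiver, the strict mode, and the scope's start position.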
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // t2: copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ lw(t2, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
  }

  // t1: the receiver of the enclosing function.
  int receiver_offset = 2 + info_->scope()->num_parameters();
  __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize));

  // t0: the strict mode.
  __ li(t0, Operand(Smi::FromInt(strict_mode())));

  // a1: the start position of the scope the call resides in.
  __ li(a1, Operand(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ Push(t2, t1, t0, a1);
  __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
}


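// Calls are specialized on the shape of the callee expression. Roughly
// (JavaScript examples for illustration):
//   eval(...)           - POSSIBLY_EVAL_CALL, resolved at runtime (above).
//   foo(...)            - GLOBAL_CALL, callee is a global variable.
//   f(...) in a 'with'  - LOOKUP_SLOT_CALL, callee found by dynamic lookup.
//   o.m(...), o[k](...) - PROPERTY_CALL, callee loaded with a load IC.
//   (expr)(...)         - OTHER_CALL, any other callee expression.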
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function.  Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call Runtime::kHiddenResolvePossiblyDirectEval
    // to resolve the function we need to call and the receiver of the
    // call.  Then we call the resolved function using the given
    // arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      __ push(a2);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ push(a1);
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in v0 (function) and
      // v1 (receiver). Touch up the stack with the right values.
      __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
    }
    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, v0);
  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithLoadIC(expr);
  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in v0)
    // and the object holding it (returned in v1).
    ASSERT(!context_register().is(a2));
    __ li(a2, Operand(proxy->name()));
    __ Push(context_register(), a2);
    __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
    __ Push(v0, v1);  // Function, receiver.

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ Branch(&call);
      __ bind(&done);
      // Push function.
      __ push(v0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing undefined to the call function stub.
      __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
      __ push(a1);
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found
    // by LoadContextSlot.
    EmitCall(expr);
  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithLoadIC(expr);
    } else {
      EmitKeyedCallWithLoadIC(expr, property->key());
    }
  } else {
    ASSERT(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ push(a1);
    // Emit function call.
    EmitCall(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}


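// 'new' expressions must evaluate the constructor before the arguments
// (ECMA-262 section 11.2.2). The construct stub records the call target in
// the type feedback vector, and with --pretenuring_call_new an adjacent
// slot additionally holds an AllocationSite so objects allocated here can
// be pretenured.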
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    ASSERT(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);
  }

  __ li(a2, FeedbackVector());
  __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(v0);
}


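// The %_IsSmi intrinsic and the predicates that follow test V8's pointer
// tagging directly. On 32-bit MIPS a small integer (smi) is stored as
// value << 1 with tag bit 0 clear, e.g. the number 5 is represented as
// 0x0000000A, while heap object pointers have bit 0 set; SmiTst is just an
// AND with the tag mask.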
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(v0, t0);
  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ NonNegativeSmiTst(v0, at);
  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


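// %_IsObject(x) is true for null and for non-callable, non-undetectable
// JS objects. Undetectable objects (document.all is the classic example)
// carry a map bit and deliberately behave like undefined, so they are
// excluded here and tested for explicitly in %_IsUndetectableObject below.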
void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ LoadRoot(at, Heap::kNullValueRootIndex);
  __ Branch(if_true, eq, v0, Operand(at));
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
  __ Branch(if_false, ne, at, Operand(zero_reg));
  __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


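// Converting a String wrapper to a primitive can use the wrapped value
// directly only if nobody has installed their own valueOf. The code below
// scans the map's descriptor array once for a "valueOf" key, then caches
// "no local valueOf" in the map's bit field 2 so that later calls on
// objects with the same map take the skip_lookup fast path; the prototype
// check at the end still runs on every call.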
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(v0);

  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
  __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ Branch(&skip_lookup, ne, t0, Operand(zero_reg));

  // Check for fast case object. Generate false result for slow case object.
  __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
  __ Branch(if_false, eq, a2, Operand(t0));

  // Look for the 'valueOf' name in the descriptor array, and indicate false
  // if found. Since we omit an enumeration index check, if 'valueOf' is later
  // added via a transition that shares this descriptor array, we get a false
  // positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(a3, a1);
  __ Branch(&done, eq, a3, Operand(zero_reg));

  __ LoadInstanceDescriptors(a1, t0);
  // t0: descriptor array.
  // a3: valid entries in the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kPointerSize == 4);
  __ li(at, Operand(DescriptorArray::kDescriptorSize));
  __ Mul(a3, a3, at);
  // Calculate location of the first key name.
  __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
  // Calculate the end of the descriptor array.
  __ mov(a2, t0);
  __ sll(t1, a3, kPointerSizeLog2);
  __ Addu(a2, a2, t1);

  // Loop through all the keys in the descriptor array. If one of these is the
  // string "valueOf", the result is false.
  // The use of t2 to store the valueOf string assumes that it is not otherwise
  // used in the loop below.
  __ li(t2, Operand(isolate()->factory()->value_of_string()));
  __ jmp(&entry);
  __ bind(&loop);
  __ lw(a3, MemOperand(t0, 0));
  __ Branch(if_false, eq, a3, Operand(t2));
  __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ Branch(&loop, ne, t0, Operand(a2));

  __ bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
  __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If it is not, the result
  // is false.
  __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
  __ JumpIfSmi(a2, if_false);
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
  __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
  __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(a3), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a2);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
  __ Branch(if_false);

  context()->Plug(if_true, if_false);
}


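// -0.0 == 0.0 in JavaScript, so %_IsMinusZero must inspect the raw
// IEEE-754 bits: a double is -0.0 exactly when its upper word (sign,
// exponent, and high mantissa bits) is 0x80000000 and its lower mantissa
// word is 0. The register shuffle below folds both comparisons into a
// single Split.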
void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
  __ lw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
  __ lw(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
  __ li(t0, 0x80000000);
  Label not_nan;
  __ Branch(&not_nan, ne, a2, Operand(t0));
  __ mov(t0, zero_reg);
  __ mov(a2, a1);
  __ bind(&not_nan);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(t0), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_ARRAY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&check_frame_marker, ne,
            a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in a1 and the formal
  // parameter count in a0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  Label exit;
  // Get the number of formal parameters.
  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&exit, ne, a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(v0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, one sitting at each end
  // of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
  __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset));
  __ GetObjectType(v0, a1, a1);
  __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));

  // v0 now contains the constructor function. Grab the
  // instance class name from there.
  __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kfunction_class_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(v0, &done);
  // If the object is not a value type, return the object.
  __ GetObjectType(v0, a1, a1);
  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));

  __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(v0);
}


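// JSDate caches computed fields (year, month, day, ...) next to the raw
// time value, guarded by a stamp. Field 0 is the time value itself and can
// always be read directly; other cached fields are valid only while the
// object's stamp matches the isolate's date cache stamp, which is bumped
// when, e.g., the time zone changes. On a stamp mismatch or for uncached
// fields the code falls back to a C function.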
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  ASSERT_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = v0;
  Register result = v0;
  Register scratch0 = t5;
  Register scratch1 = a1;

  __ JumpIfSmi(object, &not_date_object);
  __ GetObjectType(object, scratch1, scratch1);
  __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));

  if (index->value() == 0) {
    __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ li(scratch1, Operand(stamp));
      __ lw(scratch1, MemOperand(scratch1));
      __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ Branch(&runtime, ne, scratch1, Operand(scratch0));
      __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
                                            kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ li(a1, Operand(index));
    __ Move(a0, object);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(v0);
}


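// %_OneByteSeqStringSetChar(string, index, value) stores a character into
// a sequential string in place. Index and value arrive as smis; after
// untagging, the store address is
//   string + SeqOneByteString::kHeaderSize - kHeapObjectTag + index,
// so e.g. index 3 stores at byte offset kHeaderSize + 2 from the tagged
// pointer. The two-byte variant below leaves the index as a smi: with
// kSmiTagSize == 1 the smi encoding is already index * 2, which is exactly
// the scaling a two-byte store needs.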
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(1));  // index
  VisitForStackValue(args->at(2));  // value
  VisitForAccumulatorValue(args->at(0));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    Register scratch = t5;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Addu(at,
          string,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ SmiUntag(index);
  __ Addu(at, at, index);
  __ sb(value, MemOperand(at));
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(1));  // index
  VisitForStackValue(args->at(2));  // value
  VisitForAccumulatorValue(args->at(0));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    Register scratch = t5;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Addu(at,
          string,
          Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ Addu(at, at, index);
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ sh(value, MemOperand(at));
  context()->Plug(string);
}


void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(a1);  // v0 = value. a1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(a1, &done);

  // If the object is not a value type, return the value.
  __ GetObjectType(a1, a2, a2);
  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));

  // Store the value.
  __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(a2, v0);
  __ RecordWriteField(
      a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 1);

  // Load the argument into a0 and call the stub.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a0, result_register());

  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(v0, a1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register scratch = a3;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ li(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(a1);
  __ mov(a0, result_register());  // StringAddStub requires args in a0, a1.
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(v0, &runtime);
  __ GetObjectType(v0, a1, a1);
  __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));

  // InvokeFunction requires the function in a1. Move it in there.
  __ mov(a1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(v0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ mov(a0, result_register());
  __ pop(a1);
  __ pop(a2);
  __ CallStub(&stub);
  context()->Plug(v0);
}


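// %_GetFromCache(cache_id, key) reads a JSFunction result cache: a fixed
// array whose header holds a "finger" pointing at the most recently used
// key/value pair. This fast path probes only the entry under the finger;
// any other hit, as well as cache updates, is handled by the runtime call.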
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
    context()->Plug(v0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = v0;
  Register cache = a1;
  __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ lw(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ lw(cache,
        ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ lw(cache,
        FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // a2 now holds finger offset as a smi.
  __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // a3 now points to the start of fixed array elements.
  __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
  __ addu(a3, a3, at);
  // a3 now points to key of indexed element of cache.
  __ lw(a2, MemOperand(a3));
  __ Branch(&not_found, ne, key, Operand(a2));

  __ lw(v0, MemOperand(a3, kPointerSize));
  __ Branch(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kHiddenGetFromCache, 2);

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(v0);

  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}


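// Fast path for Array.prototype.join on arrays of flat ASCII strings,
// e.g. ["a", "b", "c"].join("-"). In outline:
//   1. Bail out unless the receiver is a JSArray with fast elements.
//   2. Walk the elements once, checking that each is a sequential ASCII
//      string and summing the lengths with overflow checks.
//   3. Add separator_length * (array_length - 1) to get the result length.
//   4. Allocate the result and copy the pieces using one of three loops,
//      specialized for empty, one-character, and longer separators.
// Any failed check jumps to &bailout, which returns undefined so that the
// JavaScript caller falls back to the generic join implementation.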
void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = v0;
  Register elements = no_reg;  // Will be v0.
  Register result = no_reg;  // Will be v0.
  Register separator = a1;
  Register array_length = a2;
  Register result_pos = no_reg;  // Will be a2.
  Register string_length = a3;
  Register string = t0;
  Register element = t1;
  Register elements_end = t2;
  Register scratch1 = t3;
  Register scratch2 = t5;
  Register scratch3 = t4;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ GetObjectType(array, scratch1, scratch2);
  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));

  // Check that the array has fast elements.
  __ CheckFastElements(scratch1, scratch2, &bailout);

  // If the array has length zero, return the empty string.
  __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
  __ LoadRoot(v0, Heap::kempty_stringRootIndex);
  __ Branch(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, zero_reg);
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi)
  //   separator: Separator string
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin,
        array_length, Operand(zero_reg));
  }
  __ bind(&loop);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ JumpIfSmi(string, &bailout);
  __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
  __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ Branch(&loop, lt, element, Operand(elements_end));

  // If array_length is 1, return elements[0], a string.
  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Branch(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ JumpIfSmi(separator, &bailout);
  __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is
  // not a smi but the other values are, so the result is a smi.
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ Subu(string_length, string_length, Operand(scratch1));
  __ Mult(array_length, scratch1);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
  // zero.
  __ mfhi(scratch2);
  __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
  __ mflo(scratch2);
  __ And(scratch3, scratch2, Operand(0x80000000));
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ SmiUntag(string_length);

  // Get first element in the array to free up the elements register to be used
  // for the result.
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element
  //   separator: Separator string
  //   string_length: Length of result string (not smi)
  //   array_length: Length of the array.
  __ AllocateAsciiString(result,
                         string_length,
                         scratch1,
                         scratch2,
                         elements_end,
                         &bailout);
  // Prepare for looping. Set up elements_end to point past the end of the
  // array, and result_pos to the position in the result where the first
  // character will be written.
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ Addu(result_pos,
          result,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ li(at, Operand(Smi::FromInt(1)));
  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
  __ Branch(&long_separator, gt, scratch1, Operand(at));

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator ASCII char (in lower byte).

  // Copy the separator character to the result.
  __ sb(separator, MemOperand(result_pos));
  __ Addu(result_pos, result_pos, 1);

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string,
          separator,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);

  __ bind(&long_separator);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  __ bind(&bailout);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(v0);
}

   4058 
   4059 
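// A sketch of the dispatch below (intrinsic names illustrative): a call
// such as %_IsSmi(x) with an INLINE implementation is expanded in place;
// a call with is_jsruntime() set is compiled as a load of the function
// from the builtins object followed by a CallFunctionStub invocation; and
// everything else, e.g. %NumberToString(x), becomes a direct call into
// the C++ runtime.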
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    // Push the builtins object as the receiver.
    __ lw(a0, GlobalObjectOperand());
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kBuiltinsOffset));
    __ push(a0);
    // Load the function from the receiver.
    __ li(a2, Operand(expr->name()));
    CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());

    // Push the target function under the receiver.
    __ lw(at, MemOperand(sp, 0));
    __ push(at);
    __ sw(v0, MemOperand(sp, kPointerSize));

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, v0);
  } else {
    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(v0);
  }
}


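// For unary NOT in a test context, the sub-expression is visited with the
// true and false labels swapped, so e.g. (illustrative) "if (!x) ..."
// emits the same branches as "if (x) ..." with the targets exchanged and
// never materializes a boolean value on that path.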
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ li(a1, Operand(Smi::FromInt(strict_mode())));
        __ push(a1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          __ lw(a2, GlobalObjectOperand());
          __ li(a1, Operand(var->name()));
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));
          __ Push(a2, a1, a0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          ASSERT(!context_register().is(a2));
          __ li(a2, Operand(var->name()));
          __ Push(context_register(), a2);
          __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


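// Postfix operations must leave the original value behind, so a stack slot
// is reserved up front and the saved value is later stored under whatever
// the store still needs on the stack: pushed directly for VARIABLE, at
// sp + kPointerSize for NAMED_PROPERTY (under the receiver), and at
// sp + 2 * kPointerSize for KEYED_PROPERTY (under receiver and key). On
// the inlined fast path, note that Smi::FromInt(1) is the word 2
// (value << 1), so e.g. (illustrative) incrementing Smi(41) is the word
// addition 82 + 2 = 84 == Smi(42), with AdduAndCheckForOverflow catching
// a wrap past the smi range.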
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  ASSERT(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      __ push(at);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ push(v0);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ lw(a1, MemOperand(sp, 0));
      __ push(v0);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value, in
  // case evaluating the property load has side effects.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  __ mov(a0, v0);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sw(v0, MemOperand(sp, kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
        }
      }
    }

    Register scratch1 = a1;
    Register scratch2 = t0;
    __ li(scratch1, Operand(Smi::FromInt(count_value)));
    __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
    __ BranchOnNoOverflow(&done, scratch2);
    // Call stub. Undo operation first.
    __ Move(v0, a0);
    __ jmp(&stub_call);
    __ bind(&slow);
  }
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(v0);
          break;
        case NAMED_PROPERTY:
          __ sw(v0, MemOperand(sp, kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE);
  CallIC(stub.GetCode(), expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(v0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(a0, result_register());  // Value.
      __ li(a2, Operand(prop->key()->AsLiteral()->value()));  // Name.
      __ pop(a1);  // Receiver.
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(a0, result_register());  // Value.
      __ Pop(a2, a1);  // a1 = key, a2 = receiver.
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}


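// typeof never throws for unresolvable references: e.g. (illustrative)
// "typeof undeclared" evaluates to "undefined" rather than throwing a
// ReferenceError. Globals therefore use a non-contextual load IC, and
// lookup slots fall back to a runtime helper that suppresses the
// reference error.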
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "[ Global variable");
    __ lw(a0, GlobalObjectOperand());
    __ li(a2, Operand(proxy->name()));
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(v0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ li(a0, Operand(proxy->name()));
    __ Push(cp, a0);
    __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(v0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


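// A sketch of the fast path below: for e.g. (illustrative)
// "typeof x === 'number'", the value is tested with JumpIfSmi and its map
// is then compared against the heap-number map root, so the typeof result
// string is never actually allocated.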
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(v0, if_true);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => false.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg),
          if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             String::Equals(check, factory->null_string())) {
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(v0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
    Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
          if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(v0, if_false);
    if (!FLAG_harmony_typeof) {
      __ LoadRoot(at, Heap::kNullValueRootIndex);
      __ Branch(if_true, eq, v0, Operand(at));
    }
    // Check for JS objects => true.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
    // Check for undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


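// For a comparison such as (illustrative) "a < b" inside a loop, the
// inlined fast case below ORs the two operands and uses the patch site's
// smi check: if both are smis they are compared directly; otherwise
// control falls through to the generic CompareIC, which can later patch
// the site.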
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      __ pop(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }
      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


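// E.g. (illustrative): "x === null" reduces to the single root-value
// comparison below, while "x == null" must also accept undefined and
// undetectable objects and therefore goes through the CompareNilIC.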
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ mov(a0, result_register());
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(v0);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ sw(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ lw(dst, ContextOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ li(at, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

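// The return address is "cooked" before being saved: the code object's
// base address is subtracted and the delta smi-tagged, so the stored
// value remains a valid smi even if the GC moves the code object. For
// illustration (addresses hypothetical): with ra == 0x50000024 and
// CodeObject() at 0x50000000, the delta 0x24 is stored as the smi 0x48
// and uncooked by the reverse steps in ExitFinallyBlock().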
void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(a1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook the return address in the link register into a smi-encoded
  // Code* delta before saving it on the stack.
  __ Subu(a1, ra, Operand(masm_->CodeObject()));
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  STATIC_ASSERT(0 == kSmiTag);
  __ Addu(a1, a1, Operand(a1));  // Convert to smi.

  // Store cooked return address while executing finally block.
  __ push(a1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ lw(a1, MemOperand(at));
  __ push(a1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ lw(a1, MemOperand(at));
  __ SmiTag(a1);
  __ push(a1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ lw(a1, MemOperand(at));
  __ push(a1);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(a1));
  // Restore pending message from stack.
  __ pop(a1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  __ SmiUntag(a1);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));

  // Restore cooked return address from stack.
  __ pop(a1);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook the return address and return.
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ sra(a1, a1, 1);  // Un-smi-tag value.
  __ Addu(at, a1, Operand(masm_->CodeObject()));
  __ Jump(at);
}


#undef __

#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ Call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __


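// A sketch of what gets rewritten (target address hypothetical): the
// instruction at pc - 6 * kInstrSize selects between the interrupt check
// (slt) and OSR (addiu), while the lui/ori pair at pc - 4 * kInstrSize
// holds the call target. Retargeting that pair to 0x12345678 would leave:
//   lui t9, 0x1234
//   ori t9, t9, 0x5678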
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  CodePatcher patcher(branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      // slt at, a3, zero_reg (in case of count based interrupts)
      // beq at, zero_reg, ok
      // lui t9, <interrupt stub address> upper
      // ori t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // addiu at, zero_reg, 1
      // beq at, zero_reg, ok  ;; Not changed
      // lui t9, <on-stack replacement address> upper
      // ori t9, <on-stack replacement address> lower
      // jalr t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->addiu(at, zero_reg, 1);
      break;
  }
  Address pc_immediate_load_address = pc - 4 * kInstrSize;
  // Replace the stack check address in the load-immediate (lui/ori pair)
  // with the entry address of the replacement code.
  Assembler::set_target_address_at(pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


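// The state is recovered purely by decoding the patched code: an
// add-immediate in the branch-guard slot means the back edge was patched
// for OSR, and the address loaded by the lui/ori pair then distinguishes
// OnStackReplacement from OsrAfterStackCheck; otherwise the site must
// still be the interrupt check.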
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  Address pc_immediate_load_address = pc - 4 * kInstrSize;

  ASSERT(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize)));
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    ASSERT(reinterpret_cast<uint32_t>(
        Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint32_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  ASSERT(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  if (reinterpret_cast<uint32_t>(
      Assembler::target_address_at(pc_immediate_load_address)) ==
          reinterpret_cast<uint32_t>(
              isolate->builtins()->OnStackReplacement()->entry())) {
    return ON_STACK_REPLACEMENT;
  }

  ASSERT(reinterpret_cast<uint32_t>(
      Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint32_t>(
             isolate->builtins()->OsrAfterStackCheck()->entry()));
  return OSR_AFTER_STACK_CHECK;
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS