// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "src/ast/compile-time-value.h"
#include "src/ast/scopes.h"
#include "src/builtins/builtins-constructor.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compilation-info.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"

#include "src/mips64/code-stubs-mips64.h"
#include "src/mips64/macro-assembler-mips64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

// A patch site is a location in the code that can be patched. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (raw 16 bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
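// For example, a delta of 70000 would encode as rx = the register with code 1
// and yyyy = 4465, since 1 * 0xffff + 4465 = 70000.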
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o a3: the new target value
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-mips64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ld(a2, MemOperand(sp, receiver_offset));
    __ AssertNotSmi(a2);
    __ GetObjectType(a2, a2, a2);
    __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2,
             Operand(FIRST_JS_RECEIVER_TYPE));
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  // Increment invocation count for the function.
  {
    Comment cmnt(masm_, "[ Increment invocation count");
    __ ld(a0, FieldMemOperand(a1, JSFunction::kFeedbackVectorOffset));
    __ ld(a0, FieldMemOperand(a0, Cell::kValueOffset));
    __ ld(a4, FieldMemOperand(
                  a0, FeedbackVector::kInvocationCountIndex * kPointerSize +
                          FeedbackVector::kHeaderSize));
    __ Daddu(a4, a4, Operand(Smi::FromInt(1)));
    __ sd(a4, FieldMemOperand(
                  a0, FeedbackVector::kInvocationCountIndex * kPointerSize +
                          FeedbackVector::kHeaderSize));
  }

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t1, Operand(a2));
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sd(t1, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Dsubu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sd(t1, MemOperand(sp, i * kPointerSize));
      }
    }
  }

  bool function_in_register_a1 = true;

  // Possibly allocate a local context.
  if (info->scope()->NeedsContext()) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(a1);
      __ Push(info->scope()->scope_info());
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(a3);  // Preserve new target.
      }
      if (slots <=
          ConstructorBuiltinsAssembler::MaximumFunctionContextSlots()) {
        Callable callable = CodeFactory::FastNewFunctionContext(
            isolate(), info->scope()->scope_type());
        __ li(FastNewFunctionContextDescriptor::SlotsRegister(),
              Operand(slots));
        __ Call(callable.code(), RelocInfo::CODE_TARGET);
        // Result of the FastNewFunctionContext builtin is always in new space.
        need_write_barrier = false;
      } else {
        __ push(a1);
        __ Push(Smi::FromInt(info->scope()->scope_type()));
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(a3);  // Restore new target.
      }
    }
    function_in_register_a1 = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var =
          (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                                 (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ld(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ sd(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), a0, a2,
                                    kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Registers holding the function and new target are both trashed in case we
  // bail out here. But since that can happen only when the new target is not
  // used and we allocate a context, the value of |function_in_register_a1| is
  // correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // We don't support new.target and rest parameters here.
  DCHECK_NULL(info->scope()->new_target_var());
  DCHECK_NULL(info->scope()->rest_parameter());
  DCHECK_NULL(info->scope()->this_function_var());

  Variable* arguments = info->scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_a1) {
      // Load this again, if it's used by the local context below.
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      Callable callable = CodeFactory::FastNewStrictArguments(isolate());
      __ Call(callable.code(), RelocInfo::CODE_TARGET);
      RestoreContext();
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(a1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      Callable callable = CodeFactory::FastNewSloppyArguments(isolate());
      __ Call(callable.code(), RelocInfo::CODE_TARGET);
      RestoreContext();
    }

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ LoadRoot(at, Heap::kStackLimitRootIndex);
    __ Branch(&ok, hs, sp, Operand(at));
    Handle<Code> stack_check = isolate()->builtins()->StackCheck();
    PredictableCodeSizeScope predictable(
        masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
    __ Call(stack_check, RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);

    VisitStatements(literal()->body());

    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::kZero == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
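  // The counter and the delta are both Smis, so subtracting the tagged values
  // directly still yields a valid Smi.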
  __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
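  // The budget is exhausted once the counter goes negative: slt sets 'at' only
  // when a3 < 0, so the beq below skips the interrupt check while the counter
  // is still non-negative.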
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ Branch(&ok, ge, a3, Operand(zero_reg));
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(v0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(v0);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
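      // Tear down the frame: restore the caller's fp and ra, then drop the
      // receiver and arguments before returning.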
      __ mov(sp, fp);
      __ MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      __ Daddu(sp, sp, Operand(sp_delta));
      __ Jump(ra);
    }
  }
}

void FullCodeGenerator::RestoreContext() {
  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNullOrUndefined(isolate()) || !lit->IsUndetectable());
  if (lit->IsNullOrUndefined(isolate()) || lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ sd(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  codegen()->PushOperand(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  Split(eq, result_register(), Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ld(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sd(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ LoadRoot(a4, Heap::kTrueValueRootIndex);
    Split(eq, v0, Operand(a4), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
        a1, Operand(a4));
    __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
        a1, Operand(a4));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      FeedbackSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
        __ sd(a4, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sd(at, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      FeedbackSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());

      // We need the slot where the literals array lives, too.
      slot = declaration->fun()->LiteralFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());

      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sd(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sd(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ EmitLoadFeedbackVector(a2);
  __ Push(a1, a0, a2);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ ld(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

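    // The CompareIC leaves its result in v0; zero means the operands compared
    // equal. The block before the 'skip' label below is the bailout
    // continuation: it is only entered when deoptimized code resumes here with
    // a boolean in v0.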
    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(USE_DELAY_SLOT, &done_convert, ge, a1,
            Operand(FIRST_JS_RECEIVER_TYPE));
  __ LoadRoot(at, Heap::kNullValueRootIndex);  // In delay slot.
  __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);  // In delay slot.
  __ Branch(&exit, eq, a0, Operand(at));
  __ bind(&convert);
  __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
  RestoreContext();
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(a0);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::kZero));

  __ LoadInstanceDescriptors(v0, a2);
  __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::kZero));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  __ bind(&fixed_array);

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ Push(a1, v0);  // Smi and array
  __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ Push(a1);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ li(a0, Operand(Smi::kZero));
  __ Push(a0);  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to a0, load the length to a1.
  __ ld(a0, MemOperand(sp, 0 * kPointerSize));
  __ ld(a1, MemOperand(sp, 1 * kPointerSize));
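  // Both the index and the length are Smis, so an unsigned compare is enough
  // to detect the end of the enumeration.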
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ ld(a2, MemOperand(sp, 2 * kPointerSize));
  __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiScale(a4, a0, kPointerSizeLog2);
  __ daddu(a4, a2, a4);  // Array base + scaled (smi) index.
  __ ld(result_register(), MemOperand(a4));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ ld(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ld(a1, MemOperand(sp, 4 * kPointerSize));
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, a4, Operand(a2));

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadFeedbackVector(a3);
  __ li(a2, Operand(FeedbackVector::MegamorphicSentinel(isolate())));
  __ sd(a2, FieldMemOperand(a3, FixedArray::OffsetOfElementAt(vector_index)));

  __ mov(a0, result_register());
  // a0 contains the key. The receiver in a1 is the second argument to the
  // ForInFilter. ForInFilter returns undefined if the receiver doesn't
  // have the key or returns the name-converted key.
  __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(loop_statement.continue_label(), eq, result_register(),
            Operand(at));

  // Update the 'each' property or variable from the possibly filtered
  // entry in the result_register.
  __ bind(&update_each);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the index
  // (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
  __ pop(a0);
  __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}

void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ ld(StoreDescriptor::ValueRegister(),
        MemOperand(sp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}

void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), v0);
  __ ld(StoreDescriptor::ValueRegister(),
        MemOperand(sp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Two cases: global variables and all other types of variables.
  switch (var->location()) {
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(v0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
        // Throw a reference error when using an uninitialized let/const
        // binding in harmony mode.
        Label done;
        GetVar(v0, var);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
        __ Branch(&done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError);
        __ bind(&done);
        context()->Plug(v0);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    PushOperand(a1);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<BoilerplateDescription> constant_properties =
      expr->GetOrBuildConstantProperties(isolate());
  __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ li(a2, Operand(SmiFromSlot(expr->literal_slot())));
  __ li(a1, Operand(constant_properties));
  __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    Callable callable = CodeFactory::FastCloneShallowObject(
        isolate(), expr->properties_count());
    __ Call(callable.code(), RelocInfo::CODE_TARGET);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in v0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    DCHECK(!property->is_computed_name());
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(v0);  // Save result on stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::SPREAD:
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->IsStringLiteral()) {
          DCHECK(key->IsPropertyName());
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(StoreDescriptor::ValueRegister(), result_register());
            DCHECK(StoreDescriptor::ValueRegister().is(a0));
            __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
            CallStoreIC(property->GetSlot(0), key->value(), true);
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Duplicate receiver on stack.
        __ ld(a0, MemOperand(sp));
        PushOperand(a0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
          PushOperand(a0);
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ld(a0, MemOperand(sp));
        PushOperand(a0);
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(i),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(i);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(i);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ ld(a0, MemOperand(sp));  // Duplicate receiver.
    PushOperand(a0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ li(a0, Operand(Smi::FromInt(NONE)));
    PushOperand(a0);
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(v0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<ConstantElementsPair> constant_elements =
      expr->GetOrBuildConstantElements(isolate());

  __ mov(a0, result_register());
  __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ li(a2, Operand(SmiFromSlot(expr->literal_slot())));
  __ li(a1, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
    __ Push(a3, a2, a1, a0);
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    Callable callable =
        CodeFactory::FastCloneShallowArray(isolate(), TRACK_ALLOCATION_SITE);
    __ Call(callable.code(), RelocInfo::CODE_TARGET);
   1364     RestoreContext();
   1365   }
   1366   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
   1367 
   1368   bool result_saved = false;  // Is the result saved to the stack?
   1369   ZoneList<Expression*>* subexprs = expr->values();
   1370   int length = subexprs->length();
   1371 
   1372   // Emit code to evaluate all the non-constant subexpressions and to store
   1373   // them into the newly cloned array.
   1374   for (int array_index = 0; array_index < length; array_index++) {
   1375     Expression* subexpr = subexprs->at(array_index);
   1376     DCHECK(!subexpr->IsSpread());
   1377 
    1378     // If the subexpression is a literal or a simple materialized literal,
    1379     // it is already set in the cloned array.
   1380     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
   1381 
   1382     if (!result_saved) {
   1383       PushOperand(v0);  // array literal
   1384       result_saved = true;
   1385     }
   1386 
   1387     VisitForAccumulatorValue(subexpr);
   1388 
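             // Store the computed value into the cloned array via the keyed
             // store IC: element index in the name register, the array on top
             // of the stack as receiver, and the value from the accumulator.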
   1389     __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
   1390     __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
   1391     __ mov(StoreDescriptor::ValueRegister(), result_register());
   1392     CallKeyedStoreIC(expr->LiteralFeedbackSlot());
   1393 
   1394     PrepareForBailoutForId(expr->GetIdForElement(array_index),
   1395                            BailoutState::NO_REGISTERS);
   1396   }
   1397 
   1398   if (result_saved) {
   1399     context()->PlugTOS();
   1400   } else {
   1401     context()->Plug(v0);
   1402   }
   1403 }
   1404 
   1405 
   1406 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
   1407   DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
   1408 
   1409   Comment cmnt(masm_, "[ Assignment");
   1410 
   1411   Property* property = expr->target()->AsProperty();
   1412   LhsKind assign_type = Property::GetAssignType(property);
   1413 
   1414   // Evaluate LHS expression.
   1415   switch (assign_type) {
   1416     case VARIABLE:
   1417       // Nothing to do here.
   1418       break;
   1419     case NAMED_PROPERTY:
   1420       if (expr->is_compound()) {
   1421         // We need the receiver both on the stack and in the register.
   1422         VisitForStackValue(property->obj());
   1423         __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
   1424       } else {
   1425         VisitForStackValue(property->obj());
   1426       }
   1427       break;
   1428     case KEYED_PROPERTY:
    1429       // We need the key and receiver both on the stack and in v0 and a1.
   1430       if (expr->is_compound()) {
   1431         VisitForStackValue(property->obj());
   1432         VisitForStackValue(property->key());
   1433         __ ld(LoadDescriptor::ReceiverRegister(),
   1434               MemOperand(sp, 1 * kPointerSize));
   1435         __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
   1436       } else {
   1437         VisitForStackValue(property->obj());
   1438         VisitForStackValue(property->key());
   1439       }
   1440       break;
   1441     case NAMED_SUPER_PROPERTY:
   1442     case KEYED_SUPER_PROPERTY:
   1443       UNREACHABLE();
   1444       break;
   1445   }
   1446 
   1447   // For compound assignments we need another deoptimization point after the
   1448   // variable/property load.
   1449   if (expr->is_compound()) {
   1450     { AccumulatorValueContext context(this);
   1451       switch (assign_type) {
   1452         case VARIABLE:
   1453           EmitVariableLoad(expr->target()->AsVariableProxy());
   1454           PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
   1455           break;
   1456         case NAMED_PROPERTY:
   1457           EmitNamedPropertyLoad(property);
   1458           PrepareForBailoutForId(property->LoadId(),
   1459                                  BailoutState::TOS_REGISTER);
   1460           break;
   1461         case KEYED_PROPERTY:
   1462           EmitKeyedPropertyLoad(property);
   1463           PrepareForBailoutForId(property->LoadId(),
   1464                                  BailoutState::TOS_REGISTER);
   1465           break;
   1466         case NAMED_SUPER_PROPERTY:
   1467         case KEYED_SUPER_PROPERTY:
   1468           UNREACHABLE();
   1469           break;
   1470       }
   1471     }
   1472 
   1473     Token::Value op = expr->binary_op();
   1474     PushOperand(v0);  // Left operand goes on the stack.
   1475     VisitForAccumulatorValue(expr->value());
   1476 
   1477     AccumulatorValueContext context(this);
   1478     if (ShouldInlineSmiCase(op)) {
   1479       EmitInlineSmiBinaryOp(expr->binary_operation(),
   1480                             op,
   1481                             expr->target(),
   1482                             expr->value());
   1483     } else {
   1484       EmitBinaryOp(expr->binary_operation(), op);
   1485     }
   1486 
   1487     // Deoptimization point in case the binary operation may have side effects.
   1488     PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
   1489   } else {
   1490     VisitForAccumulatorValue(expr->value());
   1491   }
   1492 
   1493   SetExpressionPosition(expr);
   1494 
   1495   // Store the value.
   1496   switch (assign_type) {
   1497     case VARIABLE: {
   1498       VariableProxy* proxy = expr->target()->AsVariableProxy();
   1499       EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
   1500                              proxy->hole_check_mode());
   1501       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   1502       context()->Plug(v0);
   1503       break;
   1504     }
   1505     case NAMED_PROPERTY:
   1506       EmitNamedPropertyAssignment(expr);
   1507       break;
   1508     case KEYED_PROPERTY:
   1509       EmitKeyedPropertyAssignment(expr);
   1510       break;
   1511     case NAMED_SUPER_PROPERTY:
   1512     case KEYED_SUPER_PROPERTY:
   1513       UNREACHABLE();
   1514       break;
   1515   }
   1516 }
   1517 
   1518 
   1519 void FullCodeGenerator::VisitYield(Yield* expr) {
   1520   // Resumable functions are not supported.
   1521   UNREACHABLE();
   1522 }
   1523 
   1524 void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
   1525   OperandStackDepthIncrement(2);
   1526   __ Push(reg1, reg2);
   1527 }
   1528 
   1529 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
   1530                                      Register reg3) {
   1531   OperandStackDepthIncrement(3);
   1532   __ Push(reg1, reg2, reg3);
   1533 }
   1534 
   1535 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
   1536                                      Register reg3, Register reg4) {
   1537   OperandStackDepthIncrement(4);
   1538   __ Push(reg1, reg2, reg3, reg4);
   1539 }
   1540 
   1541 void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
   1542   OperandStackDepthDecrement(2);
   1543   __ Pop(reg1, reg2);
   1544 }
   1545 
   1546 void FullCodeGenerator::EmitOperandStackDepthCheck() {
   1547   if (FLAG_debug_code) {
   1548     int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
   1549                         operand_stack_depth_ * kPointerSize;
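             // fp - sp should equal the fixed frame size plus the tracked
             // operand stack depth; anything else means pushes and pops are
             // unbalanced.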
   1550     __ Dsubu(v0, fp, sp);
   1551     __ Assert(eq, kUnexpectedStackDepth, v0, Operand(expected_diff));
   1552   }
   1553 }
   1554 
   1555 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
   1556   Label allocate, done_allocate;
   1557 
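           // Try to allocate the JSIteratorResult inline; on failure, call the
           // runtime below to allocate it in new space instead.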
   1558   __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate,
   1559               NO_ALLOCATION_FLAGS);
   1560   __ jmp(&done_allocate);
   1561 
   1562   __ bind(&allocate);
   1563   __ Push(Smi::FromInt(JSIteratorResult::kSize));
   1564   __ CallRuntime(Runtime::kAllocateInNewSpace);
   1565 
   1566   __ bind(&done_allocate);
   1567   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
   1568   PopOperand(a2);
   1569   __ LoadRoot(a3,
   1570               done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
   1571   __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
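           // Initialize the result: map, empty properties and elements, the
           // popped value, and the done flag.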
   1572   __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
   1573   __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
   1574   __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
   1575   __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
   1576   __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
   1577   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
   1578 }
   1579 
   1580 
   1581 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
   1582                                               Token::Value op,
   1583                                               Expression* left_expr,
   1584                                               Expression* right_expr) {
   1585   Label done, smi_case, stub_call;
   1586 
   1587   Register scratch1 = a2;
   1588   Register scratch2 = a3;
   1589 
   1590   // Get the arguments.
   1591   Register left = a1;
   1592   Register right = a0;
   1593   PopOperand(left);
   1594   __ mov(a0, result_register());
   1595 
   1596   // Perform combined smi check on both operands.
   1597   __ Or(scratch1, left, Operand(right));
   1598   STATIC_ASSERT(kSmiTag == 0);
   1599   JumpPatchSite patch_site(masm_);
   1600   patch_site.EmitJumpIfSmi(scratch1, &smi_case);
   1601 
   1602   __ bind(&stub_call);
   1603   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
   1604   CallIC(code, expr->BinaryOperationFeedbackId());
   1605   patch_site.EmitPatchInfo();
   1606   __ jmp(&done);
   1607 
   1608   __ bind(&smi_case);
    1609   // Smi case. This code works the same way as the smi-smi case in the
    1610   // type-recording binary operation stub.
   1611   switch (op) {
   1612     case Token::SAR:
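               // Arithmetic shift of the tagged value; the bits shifted out of
               // the smi land in the low 32 bits, so mask them off below to
               // leave a properly tagged smi.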
   1613       __ GetLeastBitsFromSmi(scratch1, right, 5);
   1614       __ dsrav(right, left, scratch1);
   1615       __ And(v0, right, Operand(0xffffffff00000000L));
   1616       break;
   1617     case Token::SHL: {
   1618       __ SmiUntag(scratch1, left);
   1619       __ GetLeastBitsFromSmi(scratch2, right, 5);
   1620       __ dsllv(scratch1, scratch1, scratch2);
   1621       __ SmiTag(v0, scratch1);
   1622       break;
   1623     }
   1624     case Token::SHR: {
   1625       __ SmiUntag(scratch1, left);
   1626       __ GetLeastBitsFromSmi(scratch2, right, 5);
   1627       __ dsrlv(scratch1, scratch1, scratch2);
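                 // >>> must yield a non-negative result; if bit 31 is set the
                 // value cannot be represented as a smi, so use the stub.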
   1628       __ And(scratch2, scratch1, 0x80000000);
   1629       __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
   1630       __ SmiTag(v0, scratch1);
   1631       break;
   1632     }
   1633     case Token::ADD:
   1634       __ DaddBranchOvf(v0, left, Operand(right), &stub_call);
   1635       break;
   1636     case Token::SUB:
   1637       __ DsubBranchOvf(v0, left, Operand(right), &stub_call);
   1638       break;
   1639     case Token::MUL: {
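                 // Both operands are tagged (value << 32), so the high 64 bits
                 // of the product hold the untagged result. Bail out to the
                 // stub if it does not fit in 32 signed bits, or if the result
                 // is zero but should be -0 (one operand negative).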
   1640       __ Dmulh(v0, left, right);
   1641       __ dsra32(scratch2, v0, 0);
   1642       __ sra(scratch1, v0, 31);
   1643       __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
   1644       __ SmiTag(v0);
   1645       __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
   1646       __ Daddu(scratch2, right, left);
   1647       __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
   1648       DCHECK(Smi::kZero == 0);
   1649       __ mov(v0, zero_reg);
   1650       break;
   1651     }
   1652     case Token::BIT_OR:
   1653       __ Or(v0, left, Operand(right));
   1654       break;
   1655     case Token::BIT_AND:
   1656       __ And(v0, left, Operand(right));
   1657       break;
   1658     case Token::BIT_XOR:
   1659       __ Xor(v0, left, Operand(right));
   1660       break;
   1661     default:
   1662       UNREACHABLE();
   1663   }
   1664 
   1665   __ bind(&done);
   1666   context()->Plug(v0);
   1667 }
   1668 
   1669 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
   1670   __ mov(a0, result_register());
   1671   PopOperand(a1);
   1672   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
   1673   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
   1674   CallIC(code, expr->BinaryOperationFeedbackId());
   1675   patch_site.EmitPatchInfo();
   1676   context()->Plug(v0);
   1677 }
   1678 
   1679 void FullCodeGenerator::EmitAssignment(Expression* expr, FeedbackSlot slot) {
   1680   DCHECK(expr->IsValidReferenceExpressionOrThis());
   1681 
   1682   Property* prop = expr->AsProperty();
   1683   LhsKind assign_type = Property::GetAssignType(prop);
   1684 
   1685   switch (assign_type) {
   1686     case VARIABLE: {
   1687       VariableProxy* proxy = expr->AsVariableProxy();
   1688       EffectContext context(this);
   1689       EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
   1690                              proxy->hole_check_mode());
   1691       break;
   1692     }
   1693     case NAMED_PROPERTY: {
   1694       PushOperand(result_register());  // Preserve value.
   1695       VisitForAccumulatorValue(prop->obj());
   1696       __ mov(StoreDescriptor::ReceiverRegister(), result_register());
   1697       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
   1698       CallStoreIC(slot, prop->key()->AsLiteral()->value());
   1699       break;
   1700     }
   1701     case KEYED_PROPERTY: {
   1702       PushOperand(result_register());  // Preserve value.
   1703       VisitForStackValue(prop->obj());
   1704       VisitForAccumulatorValue(prop->key());
   1705       __ Move(StoreDescriptor::NameRegister(), result_register());
   1706       PopOperands(StoreDescriptor::ValueRegister(),
   1707                   StoreDescriptor::ReceiverRegister());
   1708       CallKeyedStoreIC(slot);
   1709       break;
   1710     }
   1711     case NAMED_SUPER_PROPERTY:
   1712     case KEYED_SUPER_PROPERTY:
   1713       UNREACHABLE();
   1714       break;
   1715   }
   1716   context()->Plug(v0);
   1717 }
   1718 
   1719 
   1720 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
   1721     Variable* var, MemOperand location) {
   1722   __ sd(result_register(), location);
   1723   if (var->IsContextSlot()) {
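             // Callers computed 'location' with VarOperand(var, a1), so a1
             // still holds the context object needed by the write barrier.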
   1724     // RecordWrite may destroy all its register arguments.
   1725     __ Move(a3, result_register());
   1726     int offset = Context::SlotOffset(var->index());
   1727     __ RecordWriteContextSlot(
   1728         a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
   1729   }
   1730 }
   1731 
   1732 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
   1733                                                FeedbackSlot slot,
   1734                                                HoleCheckMode hole_check_mode) {
   1735   if (var->IsUnallocated()) {
   1736     // Global var, const, or let.
   1737     __ mov(StoreDescriptor::ValueRegister(), result_register());
   1738     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
   1739     CallStoreIC(slot, var->name());
   1740 
   1741   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
   1742     DCHECK(!var->IsLookupSlot());
   1743     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
   1744     MemOperand location = VarOperand(var, a1);
   1745     // Perform an initialization check for lexically declared variables.
   1746     if (hole_check_mode == HoleCheckMode::kRequired) {
   1747       Label assign;
   1748       __ ld(a3, location);
   1749       __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
   1750       __ Branch(&assign, ne, a3, Operand(a4));
   1751       __ li(a3, Operand(var->name()));
   1752       __ push(a3);
   1753       __ CallRuntime(Runtime::kThrowReferenceError);
   1754       __ bind(&assign);
   1755     }
   1756     if (var->mode() != CONST) {
   1757       EmitStoreToStackLocalOrContextSlot(var, location);
   1758     } else if (var->throw_on_const_assignment(language_mode())) {
   1759       __ CallRuntime(Runtime::kThrowConstAssignError);
   1760     }
   1761   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
   1762     // Initializing assignment to const {this} needs a write barrier.
   1763     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
   1764     Label uninitialized_this;
   1765     MemOperand location = VarOperand(var, a1);
   1766     __ ld(a3, location);
   1767     __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
   1768     __ Branch(&uninitialized_this, eq, a3, Operand(at));
   1769     __ li(a0, Operand(var->name()));
   1770     __ Push(a0);
   1771     __ CallRuntime(Runtime::kThrowReferenceError);
   1772     __ bind(&uninitialized_this);
   1773     EmitStoreToStackLocalOrContextSlot(var, location);
   1774 
   1775   } else {
   1776     DCHECK(var->mode() != CONST || op == Token::INIT);
   1777     DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
   1778     DCHECK(!var->IsLookupSlot());
   1779     // Assignment to var or initializing assignment to let/const in harmony
   1780     // mode.
   1781     MemOperand location = VarOperand(var, a1);
   1782     if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
   1783       // Check for an uninitialized let binding.
   1784       __ ld(a2, location);
   1785       __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
   1786       __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
   1787     }
   1788     EmitStoreToStackLocalOrContextSlot(var, location);
   1789   }
   1790 }
   1791 
   1792 
   1793 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
   1794   // Assignment to a property, using a named store IC.
   1795   Property* prop = expr->target()->AsProperty();
   1796   DCHECK(prop != NULL);
   1797   DCHECK(prop->key()->IsLiteral());
   1798 
   1799   __ mov(StoreDescriptor::ValueRegister(), result_register());
   1800   PopOperand(StoreDescriptor::ReceiverRegister());
   1801   CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
   1802 
   1803   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   1804   context()->Plug(v0);
   1805 }
   1806 
   1807 
   1808 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   1809   // Assignment to a property, using a keyed store IC.
   1810   // Call keyed store IC.
   1811   // The arguments are:
   1812   // - a0 is the value,
   1813   // - a1 is the key,
   1814   // - a2 is the receiver.
   1815   __ mov(StoreDescriptor::ValueRegister(), result_register());
   1816   PopOperands(StoreDescriptor::ReceiverRegister(),
   1817               StoreDescriptor::NameRegister());
   1818   DCHECK(StoreDescriptor::ValueRegister().is(a0));
   1819 
   1820   CallKeyedStoreIC(expr->AssignmentSlot());
   1821 
   1822   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   1823   context()->Plug(v0);
   1824 }
   1825 
   1826 // Code common for calls using the IC.
   1827 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
   1828   Expression* callee = expr->expression();
   1829 
   1830   // Get the target function.
   1831   ConvertReceiverMode convert_mode;
   1832   if (callee->IsVariableProxy()) {
   1833     { StackValueContext context(this);
   1834       EmitVariableLoad(callee->AsVariableProxy());
   1835       PrepareForBailout(callee, BailoutState::NO_REGISTERS);
   1836     }
   1837     // Push undefined as receiver. This is patched in the method prologue if it
   1838     // is a sloppy mode method.
   1839     __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
   1840     PushOperand(at);
   1841     convert_mode = ConvertReceiverMode::kNullOrUndefined;
   1842   } else {
   1843     // Load the function from the receiver.
   1844     DCHECK(callee->IsProperty());
   1845     DCHECK(!callee->AsProperty()->IsSuperAccess());
   1846     __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
   1847     EmitNamedPropertyLoad(callee->AsProperty());
   1848     PrepareForBailoutForId(callee->AsProperty()->LoadId(),
   1849                            BailoutState::TOS_REGISTER);
   1850     // Push the target function under the receiver.
   1851     __ ld(at, MemOperand(sp, 0));
   1852     PushOperand(at);
   1853     __ sd(v0, MemOperand(sp, kPointerSize));
   1854     convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
   1855   }
   1856 
   1857   EmitCall(expr, convert_mode);
   1858 }
   1859 
   1860 
   1861 // Code common for calls using the IC.
   1862 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
   1863                                                 Expression* key) {
   1864   // Load the key.
   1865   VisitForAccumulatorValue(key);
   1866 
   1867   Expression* callee = expr->expression();
   1868 
   1869   // Load the function from the receiver.
   1870   DCHECK(callee->IsProperty());
   1871   __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
   1872   __ Move(LoadDescriptor::NameRegister(), v0);
   1873   EmitKeyedPropertyLoad(callee->AsProperty());
   1874   PrepareForBailoutForId(callee->AsProperty()->LoadId(),
   1875                          BailoutState::TOS_REGISTER);
   1876 
   1877   // Push the target function under the receiver.
   1878   __ ld(at, MemOperand(sp, 0));
   1879   PushOperand(at);
   1880   __ sd(v0, MemOperand(sp, kPointerSize));
   1881 
   1882   EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
   1883 }
   1884 
   1885 
   1886 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
   1887   // Load the arguments.
   1888   ZoneList<Expression*>* args = expr->arguments();
   1889   int arg_count = args->length();
   1890   for (int i = 0; i < arg_count; i++) {
   1891     VisitForStackValue(args->at(i));
   1892   }
   1893 
   1894   PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
   1895   // Record source position of the IC call.
   1896   SetCallPosition(expr, expr->tail_call_mode());
   1897   if (expr->tail_call_mode() == TailCallMode::kAllow) {
   1898     if (FLAG_trace) {
   1899       __ CallRuntime(Runtime::kTraceTailCall);
   1900     }
   1901     // Update profiling counters before the tail call since we will
   1902     // not return to this function.
   1903     EmitProfilingCounterHandlingForReturnSequence(true);
   1904   }
   1905   Handle<Code> code =
   1906       CodeFactory::CallICTrampoline(isolate(), mode, expr->tail_call_mode())
   1907           .code();
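           // CallIC inputs: a3 holds the feedback slot index, a1 the target
           // function (loaded from beneath the receiver and arguments on the
           // stack), and a0 the argument count.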
   1908   __ li(a3, Operand(IntFromSlot(expr->CallFeedbackICSlot())));
   1909   __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
   1910   __ li(a0, Operand(arg_count));
   1911   CallIC(code);
   1912   OperandStackDepthDecrement(arg_count + 1);
   1913 
   1914   RecordJSReturnSite(expr);
   1915   RestoreContext();
   1916   context()->DropAndPlug(1, v0);
   1917 }
   1918 
   1919 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   1920   Comment cmnt(masm_, "[ CallNew");
   1921   // According to ECMA-262, section 11.2.2, page 44, the function
   1922   // expression in new calls must be evaluated before the
   1923   // arguments.
   1924 
   1925   // Push constructor on the stack.  If it's not a function it's used as
   1926   // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
   1927   // ignored.
   1928   DCHECK(!expr->expression()->IsSuperPropertyReference());
   1929   VisitForStackValue(expr->expression());
   1930 
   1931   // Push the arguments ("left-to-right") on the stack.
   1932   ZoneList<Expression*>* args = expr->arguments();
   1933   int arg_count = args->length();
   1934   for (int i = 0; i < arg_count; i++) {
   1935     VisitForStackValue(args->at(i));
   1936   }
   1937 
   1938   // Call the construct call builtin that handles allocation and
   1939   // constructor invocation.
   1940   SetConstructCallPosition(expr);
   1941 
   1942   // Load function and argument count into a1 and a0.
   1943   __ li(a0, Operand(arg_count));
   1944   __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
   1945 
   1946   // Record call targets in unoptimized code.
   1947   __ EmitLoadFeedbackVector(a2);
   1948   __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
   1949 
   1950   CallConstructStub stub(isolate());
   1951   CallIC(stub.GetCode());
   1952   OperandStackDepthDecrement(arg_count + 1);
   1953   PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
   1954   RestoreContext();
   1955   context()->Plug(v0);
   1956 }
   1957 
   1958 
   1959 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
   1960   ZoneList<Expression*>* args = expr->arguments();
   1961   DCHECK(args->length() == 1);
   1962 
   1963   VisitForAccumulatorValue(args->at(0));
   1964 
   1965   Label materialize_true, materialize_false;
   1966   Label* if_true = NULL;
   1967   Label* if_false = NULL;
   1968   Label* fall_through = NULL;
   1969   context()->PrepareTest(&materialize_true, &materialize_false,
   1970                          &if_true, &if_false, &fall_through);
   1971 
   1972   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   1973   __ SmiTst(v0, a4);
   1974   Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);
   1975 
   1976   context()->Plug(if_true, if_false);
   1977 }
   1978 
   1979 
   1980 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
   1981   ZoneList<Expression*>* args = expr->arguments();
   1982   DCHECK(args->length() == 1);
   1983 
   1984   VisitForAccumulatorValue(args->at(0));
   1985 
   1986   Label materialize_true, materialize_false;
   1987   Label* if_true = NULL;
   1988   Label* if_false = NULL;
   1989   Label* fall_through = NULL;
   1990   context()->PrepareTest(&materialize_true, &materialize_false,
   1991                          &if_true, &if_false, &fall_through);
   1992 
   1993   __ JumpIfSmi(v0, if_false);
   1994   __ GetObjectType(v0, a1, a1);
   1995   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   1996   Split(ge, a1, Operand(FIRST_JS_RECEIVER_TYPE),
   1997         if_true, if_false, fall_through);
   1998 
   1999   context()->Plug(if_true, if_false);
   2000 }
   2001 
   2002 
   2003 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
   2004   ZoneList<Expression*>* args = expr->arguments();
   2005   DCHECK(args->length() == 1);
   2006 
   2007   VisitForAccumulatorValue(args->at(0));
   2008 
   2009   Label materialize_true, materialize_false;
   2010   Label* if_true = NULL;
   2011   Label* if_false = NULL;
   2012   Label* fall_through = NULL;
   2013   context()->PrepareTest(&materialize_true, &materialize_false,
   2014                          &if_true, &if_false, &fall_through);
   2015 
   2016   __ JumpIfSmi(v0, if_false);
   2017   __ GetObjectType(v0, a1, a1);
   2018   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2019   Split(eq, a1, Operand(JS_ARRAY_TYPE),
   2020         if_true, if_false, fall_through);
   2021 
   2022   context()->Plug(if_true, if_false);
   2023 }
   2024 
   2025 
   2026 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
   2027   ZoneList<Expression*>* args = expr->arguments();
   2028   DCHECK(args->length() == 1);
   2029 
   2030   VisitForAccumulatorValue(args->at(0));
   2031 
   2032   Label materialize_true, materialize_false;
   2033   Label* if_true = NULL;
   2034   Label* if_false = NULL;
   2035   Label* fall_through = NULL;
   2036   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
   2037                          &if_false, &fall_through);
   2038 
   2039   __ JumpIfSmi(v0, if_false);
   2040   __ GetObjectType(v0, a1, a1);
   2041   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2042   Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);
   2043 
   2044   context()->Plug(if_true, if_false);
   2045 }
   2046 
   2047 
   2048 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
   2049   ZoneList<Expression*>* args = expr->arguments();
   2050   DCHECK(args->length() == 1);
   2051 
   2052   VisitForAccumulatorValue(args->at(0));
   2053 
   2054   Label materialize_true, materialize_false;
   2055   Label* if_true = NULL;
   2056   Label* if_false = NULL;
   2057   Label* fall_through = NULL;
   2058   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
   2059                          &if_false, &fall_through);
   2060 
   2061   __ JumpIfSmi(v0, if_false);
   2062   __ GetObjectType(v0, a1, a1);
   2063   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2064   Split(eq, a1, Operand(JS_PROXY_TYPE), if_true, if_false, fall_through);
   2065 
   2066   context()->Plug(if_true, if_false);
   2067 }
   2068 
   2069 
   2070 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
   2071   ZoneList<Expression*>* args = expr->arguments();
   2072   DCHECK(args->length() == 1);
   2073   Label done, null, function, non_function_constructor;
   2074 
   2075   VisitForAccumulatorValue(args->at(0));
   2076 
   2077   // If the object is not a JSReceiver, we return null.
   2078   __ JumpIfSmi(v0, &null);
   2079   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
   2080   __ GetObjectType(v0, v0, a1);  // Map is now in v0.
   2081   __ Branch(&null, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
   2082 
   2083   // Return 'Function' for JSFunction and JSBoundFunction objects.
   2084   STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
   2085   __ Branch(&function, hs, a1, Operand(FIRST_FUNCTION_TYPE));
   2086 
   2087   // Check if the constructor in the map is a JS function.
   2088   Register instance_type = a2;
   2089   __ GetMapConstructor(v0, v0, a1, instance_type);
   2090   __ Branch(&non_function_constructor, ne, instance_type,
   2091             Operand(JS_FUNCTION_TYPE));
   2092 
   2093   // v0 now contains the constructor function. Grab the
   2094   // instance class name from there.
   2095   __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
   2096   __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
   2097   __ Branch(&done);
   2098 
   2099   // Functions have class 'Function'.
   2100   __ bind(&function);
   2101   __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
   2102   __ jmp(&done);
   2103 
   2104   // Objects with a non-function constructor have class 'Object'.
   2105   __ bind(&non_function_constructor);
   2106   __ LoadRoot(v0, Heap::kObject_stringRootIndex);
   2107   __ jmp(&done);
   2108 
   2109   // Non-JS objects have class null.
   2110   __ bind(&null);
   2111   __ LoadRoot(v0, Heap::kNullValueRootIndex);
   2112 
   2113   // All done.
   2114   __ bind(&done);
   2115 
   2116   context()->Plug(v0);
   2117 }
   2118 
   2119 
   2120 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   2121   ZoneList<Expression*>* args = expr->arguments();
   2122   DCHECK(args->length() == 2);
   2123 
   2124   VisitForStackValue(args->at(0));
   2125   VisitForAccumulatorValue(args->at(1));
   2126   __ mov(a0, result_register());
   2127 
   2128   Register object = a1;
   2129   Register index = a0;
   2130   Register result = v0;
   2131 
   2132   PopOperand(object);
   2133 
   2134   Label need_conversion;
   2135   Label index_out_of_range;
   2136   Label done;
   2137   StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
   2138                                       &need_conversion, &index_out_of_range);
   2139   generator.GenerateFast(masm_);
   2140   __ jmp(&done);
   2141 
   2142   __ bind(&index_out_of_range);
   2143   // When the index is out of range, the spec requires us to return
   2144   // NaN.
   2145   __ LoadRoot(result, Heap::kNanValueRootIndex);
   2146   __ jmp(&done);
   2147 
   2148   __ bind(&need_conversion);
   2149   // Load the undefined value into the result register, which will
   2150   // trigger conversion.
   2151   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
   2152   __ jmp(&done);
   2153 
   2154   NopRuntimeCallHelper call_helper;
   2155   generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
   2156 
   2157   __ bind(&done);
   2158   context()->Plug(result);
   2159 }
   2160 
   2161 
   2162 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
   2163   ZoneList<Expression*>* args = expr->arguments();
   2164   DCHECK_LE(2, args->length());
   2165   // Push target, receiver and arguments onto the stack.
   2166   for (Expression* const arg : *args) {
   2167     VisitForStackValue(arg);
   2168   }
   2169   PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
   2170   // Move target to a1.
   2171   int const argc = args->length() - 2;
   2172   __ ld(a1, MemOperand(sp, (argc + 1) * kPointerSize));
   2173   // Call the target.
   2174   __ li(a0, Operand(argc));
   2175   __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   2176   OperandStackDepthDecrement(argc + 1);
   2177   RestoreContext();
   2178   // Discard the function left on TOS.
   2179   context()->DropAndPlug(1, v0);
   2180 }
   2181 
   2182 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
   2183   ZoneList<Expression*>* args = expr->arguments();
   2184   DCHECK_EQ(1, args->length());
   2185   VisitForAccumulatorValue(args->at(0));
   2186   __ AssertFunction(v0);
   2187   __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
   2188   __ ld(v0, FieldMemOperand(v0, Map::kPrototypeOffset));
   2189   context()->Plug(v0);
   2190 }
   2191 
   2192 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
   2193   DCHECK(expr->arguments()->length() == 0);
   2194   ExternalReference debug_is_active =
   2195       ExternalReference::debug_is_active_address(isolate());
   2196   __ li(at, Operand(debug_is_active));
   2197   __ lbu(v0, MemOperand(at));
   2198   __ SmiTag(v0);
   2199   context()->Plug(v0);
   2200 }
   2201 
   2202 
   2203 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
   2204   ZoneList<Expression*>* args = expr->arguments();
   2205   DCHECK_EQ(2, args->length());
   2206   VisitForStackValue(args->at(0));
   2207   VisitForStackValue(args->at(1));
   2208 
   2209   Label runtime, done;
   2210 
   2211   __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime,
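           // Allocate and initialize the JSIteratorResult inline; if the
           // allocation fails, fall back to the runtime call below.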
   2212               NO_ALLOCATION_FLAGS);
   2213   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
   2214   __ Pop(a2, a3);
   2215   __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
   2216   __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
   2217   __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
   2218   __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
   2219   __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
   2220   __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
   2221   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
   2222   __ jmp(&done);
   2223 
   2224   __ bind(&runtime);
   2225   CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
   2226 
   2227   __ bind(&done);
   2228   context()->Plug(v0);
   2229 }
   2230 
   2231 
   2232 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
   2233   // Push function.
   2234   __ LoadNativeContextSlot(expr->context_index(), v0);
   2235   PushOperand(v0);
   2236 
   2237   // Push undefined as the receiver.
   2238   __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
   2239   PushOperand(v0);
   2240 }
   2241 
   2242 
   2243 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
   2244   ZoneList<Expression*>* args = expr->arguments();
   2245   int arg_count = args->length();
   2246 
   2247   SetCallPosition(expr);
   2248   __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
   2249   __ li(a0, Operand(arg_count));
   2250   __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
   2251           RelocInfo::CODE_TARGET);
   2252   OperandStackDepthDecrement(arg_count + 1);
   2253   RestoreContext();
   2254 }
   2255 
   2256 
   2257 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
   2258   switch (expr->op()) {
   2259     case Token::DELETE: {
   2260       Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
   2261       Property* property = expr->expression()->AsProperty();
   2262       VariableProxy* proxy = expr->expression()->AsVariableProxy();
   2263 
   2264       if (property != NULL) {
   2265         VisitForStackValue(property->obj());
   2266         VisitForStackValue(property->key());
   2267         CallRuntimeWithOperands(is_strict(language_mode())
   2268                                     ? Runtime::kDeleteProperty_Strict
   2269                                     : Runtime::kDeleteProperty_Sloppy);
   2270         context()->Plug(v0);
   2271       } else if (proxy != NULL) {
   2272         Variable* var = proxy->var();
   2273         // Delete of an unqualified identifier is disallowed in strict mode but
   2274         // "delete this" is allowed.
   2275         bool is_this = var->is_this();
   2276         DCHECK(is_sloppy(language_mode()) || is_this);
   2277         if (var->IsUnallocated()) {
   2278           __ LoadGlobalObject(a2);
   2279           __ li(a1, Operand(var->name()));
   2280           __ Push(a2, a1);
   2281           __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
   2282           context()->Plug(v0);
   2283         } else {
   2284           DCHECK(!var->IsLookupSlot());
   2285           DCHECK(var->IsStackAllocated() || var->IsContextSlot());
   2286           // Result of deleting non-global, non-dynamic variables is false.
   2287           // The subexpression does not have side effects.
   2288           context()->Plug(is_this);
   2289         }
   2290       } else {
   2291         // Result of deleting non-property, non-variable reference is true.
   2292         // The subexpression may have side effects.
   2293         VisitForEffect(expr->expression());
   2294         context()->Plug(true);
   2295       }
   2296       break;
   2297     }
   2298 
   2299     case Token::VOID: {
   2300       Comment cmnt(masm_, "[ UnaryOperation (VOID)");
   2301       VisitForEffect(expr->expression());
   2302       context()->Plug(Heap::kUndefinedValueRootIndex);
   2303       break;
   2304     }
   2305 
   2306     case Token::NOT: {
   2307       Comment cmnt(masm_, "[ UnaryOperation (NOT)");
   2308       if (context()->IsEffect()) {
   2309         // Unary NOT has no side effects so it's only necessary to visit the
   2310         // subexpression.  Match the optimizing compiler by not branching.
   2311         VisitForEffect(expr->expression());
   2312       } else if (context()->IsTest()) {
   2313         const TestContext* test = TestContext::cast(context());
   2314         // The labels are swapped for the recursive call.
   2315         VisitForControl(expr->expression(),
   2316                         test->false_label(),
   2317                         test->true_label(),
   2318                         test->fall_through());
   2319         context()->Plug(test->true_label(), test->false_label());
   2320       } else {
   2321         // We handle value contexts explicitly rather than simply visiting
   2322         // for control and plugging the control flow into the context,
   2323         // because we need to prepare a pair of extra administrative AST ids
   2324         // for the optimizing compiler.
   2325         DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
   2326         Label materialize_true, materialize_false, done;
   2327         VisitForControl(expr->expression(),
   2328                         &materialize_false,
   2329                         &materialize_true,
   2330                         &materialize_true);
   2331         if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
   2332         __ bind(&materialize_true);
   2333         PrepareForBailoutForId(expr->MaterializeTrueId(),
   2334                                BailoutState::NO_REGISTERS);
   2335         __ LoadRoot(v0, Heap::kTrueValueRootIndex);
   2336         if (context()->IsStackValue()) __ push(v0);
   2337         __ jmp(&done);
   2338         __ bind(&materialize_false);
   2339         PrepareForBailoutForId(expr->MaterializeFalseId(),
   2340                                BailoutState::NO_REGISTERS);
   2341         __ LoadRoot(v0, Heap::kFalseValueRootIndex);
   2342         if (context()->IsStackValue()) __ push(v0);
   2343         __ bind(&done);
   2344       }
   2345       break;
   2346     }
   2347 
   2348     case Token::TYPEOF: {
   2349       Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
   2350       {
   2351         AccumulatorValueContext context(this);
   2352         VisitForTypeofValue(expr->expression());
   2353       }
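               // The Typeof builtin takes its operand in a3, so move it out of
               // the accumulator.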
   2354       __ mov(a3, v0);
   2355       __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
   2356       context()->Plug(v0);
   2357       break;
   2358     }
   2359 
   2360     default:
   2361       UNREACHABLE();
   2362   }
   2363 }
   2364 
   2365 
   2366 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
   2367   DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
   2368 
   2369   Comment cmnt(masm_, "[ CountOperation");
   2370 
   2371   Property* prop = expr->expression()->AsProperty();
   2372   LhsKind assign_type = Property::GetAssignType(prop);
   2373 
   2374   // Evaluate expression and get value.
   2375   if (assign_type == VARIABLE) {
   2376     DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
   2377     AccumulatorValueContext context(this);
   2378     EmitVariableLoad(expr->expression()->AsVariableProxy());
   2379   } else {
   2380     // Reserve space for result of postfix operation.
   2381     if (expr->is_postfix() && !context()->IsEffect()) {
   2382       __ li(at, Operand(Smi::kZero));
   2383       PushOperand(at);
   2384     }
   2385     switch (assign_type) {
   2386       case NAMED_PROPERTY: {
   2387         // Put the object both on the stack and in the register.
   2388         VisitForStackValue(prop->obj());
   2389         __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
   2390         EmitNamedPropertyLoad(prop);
   2391         break;
   2392       }
   2393 
   2394       case KEYED_PROPERTY: {
   2395         VisitForStackValue(prop->obj());
   2396         VisitForStackValue(prop->key());
   2397         __ ld(LoadDescriptor::ReceiverRegister(),
   2398               MemOperand(sp, 1 * kPointerSize));
   2399         __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
   2400         EmitKeyedPropertyLoad(prop);
   2401         break;
   2402       }
   2403 
   2404       case NAMED_SUPER_PROPERTY:
   2405       case KEYED_SUPER_PROPERTY:
   2406       case VARIABLE:
   2407         UNREACHABLE();
   2408     }
   2409   }
   2410 
   2411   // We need a second deoptimization point after loading the value
    2412   // in case evaluating the property load may have a side effect.
   2413   if (assign_type == VARIABLE) {
   2414     PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
   2415   } else {
   2416     PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
   2417   }
   2418 
   2419   // Inline smi case if we are in a loop.
   2420   Label stub_call, done;
   2421   JumpPatchSite patch_site(masm_);
   2422 
   2423   int count_value = expr->op() == Token::INC ? 1 : -1;
   2424   __ mov(a0, v0);
   2425   if (ShouldInlineSmiCase(expr->op())) {
   2426     Label slow;
   2427     patch_site.EmitJumpIfNotSmi(v0, &slow);
   2428 
   2429     // Save result for postfix expressions.
   2430     if (expr->is_postfix()) {
   2431       if (!context()->IsEffect()) {
   2432         // Save the result on the stack. If we have a named or keyed property
   2433         // we store the result under the receiver that is currently on top
   2434         // of the stack.
   2435         switch (assign_type) {
   2436           case VARIABLE:
   2437             __ push(v0);
   2438             break;
   2439           case NAMED_PROPERTY:
   2440             __ sd(v0, MemOperand(sp, kPointerSize));
   2441             break;
   2442           case KEYED_PROPERTY:
   2443             __ sd(v0, MemOperand(sp, 2 * kPointerSize));
   2444             break;
   2445           case NAMED_SUPER_PROPERTY:
   2446           case KEYED_SUPER_PROPERTY:
   2447             UNREACHABLE();
   2448             break;
   2449         }
   2450       }
   2451     }
   2452 
   2453     Register scratch1 = a1;
   2454     __ li(scratch1, Operand(Smi::FromInt(count_value)));
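             // Add the smi-tagged count in place; branch straight to done when
             // the addition does not overflow.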
   2455     __ DaddBranchNoOvf(v0, v0, Operand(scratch1), &done);
   2456     // Call stub. Undo operation first.
   2457     __ Move(v0, a0);
   2458     __ jmp(&stub_call);
   2459     __ bind(&slow);
   2460   }
   2461 
   2462   // Convert old value into a number.
   2463   __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
   2464   RestoreContext();
   2465   PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
   2466 
   2467   // Save result for postfix expressions.
   2468   if (expr->is_postfix()) {
   2469     if (!context()->IsEffect()) {
   2470       // Save the result on the stack. If we have a named or keyed property
   2471       // we store the result under the receiver that is currently on top
   2472       // of the stack.
   2473       switch (assign_type) {
   2474         case VARIABLE:
   2475           PushOperand(v0);
   2476           break;
   2477         case NAMED_PROPERTY:
   2478           __ sd(v0, MemOperand(sp, kPointerSize));
   2479           break;
   2480         case KEYED_PROPERTY:
   2481           __ sd(v0, MemOperand(sp, 2 * kPointerSize));
   2482           break;
   2483         case NAMED_SUPER_PROPERTY:
   2484         case KEYED_SUPER_PROPERTY:
   2485           UNREACHABLE();
   2486           break;
   2487       }
   2488     }
   2489   }
   2490 
   2491   __ bind(&stub_call);
   2492   __ mov(a1, v0);
   2493   __ li(a0, Operand(Smi::FromInt(count_value)));
   2494 
   2495   SetExpressionPosition(expr);
   2496 
   2497   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
   2498   CallIC(code, expr->CountBinOpFeedbackId());
   2499   patch_site.EmitPatchInfo();
   2500   __ bind(&done);
   2501 
   2502   // Store the value returned in v0.
   2503   switch (assign_type) {
   2504     case VARIABLE: {
   2505       VariableProxy* proxy = expr->expression()->AsVariableProxy();
   2506       if (expr->is_postfix()) {
   2507         { EffectContext context(this);
   2508           EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
   2509                                  proxy->hole_check_mode());
   2510           PrepareForBailoutForId(expr->AssignmentId(),
   2511                                  BailoutState::TOS_REGISTER);
   2512           context.Plug(v0);
   2513         }
    2514         // For all contexts except EffectContext we have the result on
   2515         // top of the stack.
   2516         if (!context()->IsEffect()) {
   2517           context()->PlugTOS();
   2518         }
   2519       } else {
   2520         EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
   2521                                proxy->hole_check_mode());
   2522         PrepareForBailoutForId(expr->AssignmentId(),
   2523                                BailoutState::TOS_REGISTER);
   2524         context()->Plug(v0);
   2525       }
   2526       break;
   2527     }
   2528     case NAMED_PROPERTY: {
   2529       __ mov(StoreDescriptor::ValueRegister(), result_register());
   2530       PopOperand(StoreDescriptor::ReceiverRegister());
   2531       CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
   2532       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   2533       if (expr->is_postfix()) {
   2534         if (!context()->IsEffect()) {
   2535           context()->PlugTOS();
   2536         }
   2537       } else {
   2538         context()->Plug(v0);
   2539       }
   2540       break;
   2541     }
   2542     case KEYED_PROPERTY: {
   2543       __ mov(StoreDescriptor::ValueRegister(), result_register());
   2544       PopOperands(StoreDescriptor::ReceiverRegister(),
   2545                   StoreDescriptor::NameRegister());
   2546       CallKeyedStoreIC(expr->CountSlot());
   2547       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   2548       if (expr->is_postfix()) {
   2549         if (!context()->IsEffect()) {
   2550           context()->PlugTOS();
   2551         }
   2552       } else {
   2553         context()->Plug(v0);
   2554       }
   2555       break;
   2556     }
   2557     case NAMED_SUPER_PROPERTY:
   2558     case KEYED_SUPER_PROPERTY:
   2559       UNREACHABLE();
   2560       break;
   2561   }
   2562 }
   2563 
   2564 
   2565 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
   2566                                                  Expression* sub_expr,
   2567                                                  Handle<String> check) {
   2568   Label materialize_true, materialize_false;
   2569   Label* if_true = NULL;
   2570   Label* if_false = NULL;
   2571   Label* fall_through = NULL;
   2572   context()->PrepareTest(&materialize_true, &materialize_false,
   2573                          &if_true, &if_false, &fall_through);
   2574 
   2575   { AccumulatorValueContext context(this);
   2576     VisitForTypeofValue(sub_expr);
   2577   }
   2578   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2579 
   2580   Factory* factory = isolate()->factory();
   2581   if (String::Equals(check, factory->number_string())) {
   2582     __ JumpIfSmi(v0, if_true);
   2583     __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
   2584     __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
   2585     Split(eq, v0, Operand(at), if_true, if_false, fall_through);
   2586   } else if (String::Equals(check, factory->string_string())) {
   2587     __ JumpIfSmi(v0, if_false);
   2588     __ GetObjectType(v0, v0, a1);
   2589     Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
   2590           fall_through);
   2591   } else if (String::Equals(check, factory->symbol_string())) {
   2592     __ JumpIfSmi(v0, if_false);
   2593     __ GetObjectType(v0, v0, a1);
   2594     Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
   2595   } else if (String::Equals(check, factory->boolean_string())) {
   2596     __ LoadRoot(at, Heap::kTrueValueRootIndex);
   2597     __ Branch(if_true, eq, v0, Operand(at));
   2598     __ LoadRoot(at, Heap::kFalseValueRootIndex);
   2599     Split(eq, v0, Operand(at), if_true, if_false, fall_through);
   2600   } else if (String::Equals(check, factory->undefined_string())) {
   2601     __ LoadRoot(at, Heap::kNullValueRootIndex);
   2602     __ Branch(if_false, eq, v0, Operand(at));
   2603     __ JumpIfSmi(v0, if_false);
   2604     // Check for undetectable objects => true.
   2605     __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
   2606     __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
   2607     __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
   2608     Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
   2609   } else if (String::Equals(check, factory->function_string())) {
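             // typeof is "function" only for maps that are callable and not
             // undetectable, so test both bits together.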
   2610     __ JumpIfSmi(v0, if_false);
   2611     __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
   2612     __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
   2613     __ And(a1, a1,
   2614            Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
   2615     Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
   2616           fall_through);
   2617   } else if (String::Equals(check, factory->object_string())) {
   2618     __ JumpIfSmi(v0, if_false);
   2619     __ LoadRoot(at, Heap::kNullValueRootIndex);
   2620     __ Branch(if_true, eq, v0, Operand(at));
   2621     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
   2622     __ GetObjectType(v0, v0, a1);
   2623     __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
   2624     // Check for callable or undetectable objects => false.
   2625     __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
   2626     __ And(a1, a1,
   2627            Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
   2628     Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
   2629   } else {
   2630     if (if_false != fall_through) __ jmp(if_false);
   2631   }
   2632   context()->Plug(if_true, if_false);
   2633 }
   2634 
   2635 
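// Compiles a comparison expression.  Comparisons against literals (typeof
// checks and null/undefined checks) are handled by the Emit* helpers; the
// remaining cases are, roughly:
//   'key' in obj    -> EmitHasProperty, result compared against true
//   x instanceof F  -> InstanceOf builtin, result compared against true
//   x == y, x < y   -> CompareIC, with an optional inlined Smi fast path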
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
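    // `key in obj`: the left operand was pushed above, the right operand is
    // pushed here, and EmitHasProperty leaves a boolean in v0 which is then
    // compared against the true value.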
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(a4, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
      break;

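    // `x instanceof F`: the right operand (the constructor) is moved into a0
    // and the left operand is popped into a1 before calling the InstanceOf
    // builtin; the builtin's boolean result is compared against true.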
    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      __ mov(a0, result_register());
      PopOperand(a1);
      __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
      RestoreContext();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(a4, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
      break;
    }

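    // All other operators (==, ===, <, <=, ...) go through the generic
    // CompareIC.  When Smi operands are likely, a patchable fast path
    // compares the two Smis directly and only falls through to the IC call
    // in the slow case.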
    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      PopOperand(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


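// Compiles comparisons against the null and undefined literals.  A strict
// comparison (e.g. `x === null`) reduces to a single root compare, while a
// sloppy one (e.g. `x == null`) is implemented as a test of the undetectable
// bit in the operand's map, so undetectable objects compare equal as well.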
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(v0, if_false);
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}

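// Frame slots hold tagged, pointer-sized values, so the 64-bit ld/sd
// instructions are used here (the 32-bit MIPS port uses lw/sw for the same
// accesses).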
void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ ld(value, MemOperand(fp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ sd(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ld(dst, ContextMemOperand(cp, context_index));
}


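// Pushes the function (closure) argument for a context allocation call.
// Which closure is pushed depends on the kind of scope, as the comments in
// each branch below explain.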
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  DeclarationScope* closure_scope = scope()->GetClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ ld(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  PushOperand(at);
}


#undef __


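// Patches the interrupt check that full-codegen emits at loop back edges.
// In the INTERRUPT state the `slt` performs a real comparison and the call
// targets the InterruptCheck builtin; in the ON_STACK_REPLACEMENT state the
// comparison is replaced by `daddiu at, zero_reg, 1`, so the branch over the
// call is never taken and the re-targeted call to the OnStackReplacement
// builtin always runs.  The expected instruction sequences are listed in the
// switch below.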
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address pc_immediate_load_address =
      Assembler::target_address_from_return_address(pc);
  Address branch_address = pc_immediate_load_address - 2 * kInstrSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  CodePatcher patcher(isolate, branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      // slt  at, a3, zero_reg (in case of count based interrupts)
      // beq  at, zero_reg, ok
      // lui  t9, <interrupt stub address> upper
      // ori  t9, <interrupt stub address> middle
      // dsll t9, t9, 16
      // ori  t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
      // daddiu at, zero_reg, 1
      // beq  at, zero_reg, ok  ;; Not changed
      // lui  t9, <on-stack replacement address> upper
      // ori  t9, <on-stack replacement address> middle
      // dsll t9, t9, 16
      // ori  t9, <on-stack replacement address> lower
      // jalr t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->daddiu(at, zero_reg, 1);
      break;
  }
  // Replace the stack check address in the load-immediate sequence with the
  // entry address of the replacement code.
  Assembler::set_target_address_at(isolate, pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


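// Reads back the current state of a back edge patched by PatchAt above: if
// the instruction before the branch is not an add-immediate the site is still
// in the INTERRUPT state, otherwise the `daddiu` marker identifies the
// ON_STACK_REPLACEMENT state.  The DCHECKs cross-check the call target
// against the corresponding builtin entry.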
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address pc_immediate_load_address =
      Assembler::target_address_from_return_address(pc);
  Address branch_address = pc_immediate_load_address - 2 * kInstrSize;

  DCHECK(Assembler::IsBeq(Assembler::instr_at(branch_address + kInstrSize)));
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    DCHECK(reinterpret_cast<uint64_t>(
        Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint64_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  DCHECK(reinterpret_cast<uint64_t>(
             Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint64_t>(
             isolate->builtins()->OnStackReplacement()->entry()));
  return ON_STACK_REPLACEMENT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64