      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #if V8_TARGET_ARCH_MIPS
      6 
      7 // Note on Mips implementation:
      8 //
      9 // The result_register() for mips is the 'v0' register, which is defined
     10 // by the ABI to contain function return values. However, the first
     11 // parameter to a function is defined to be 'a0'. So there are many
     12 // places where we have to move a previous result in v0 to a0 for the
     13 // next call: mov(a0, v0). This is not needed on the other architectures.
     14 
     15 #include "src/ast/compile-time-value.h"
     16 #include "src/ast/scopes.h"
     17 #include "src/builtins/builtins-constructor.h"
     18 #include "src/code-factory.h"
     19 #include "src/code-stubs.h"
     20 #include "src/codegen.h"
     21 #include "src/compilation-info.h"
     22 #include "src/compiler.h"
     23 #include "src/debug/debug.h"
     24 #include "src/full-codegen/full-codegen.h"
     25 #include "src/ic/ic.h"
     26 
     27 #include "src/mips/code-stubs-mips.h"
     28 #include "src/mips/macro-assembler-mips.h"
     29 
     30 namespace v8 {
     31 namespace internal {
     32 
     33 #define __ ACCESS_MASM(masm())
     34 
      35 // A patch site is a location in the code that can be patched. This
     36 // class has a number of methods to emit the code which is patchable and the
     37 // method EmitPatchInfo to record a marker back to the patchable code. This
      38 // marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
     39 // (raw 16 bit immediate value is used) is the delta from the pc to the first
     40 // instruction of the patchable code.
     41 // The marker instruction is effectively a NOP (dest is zero_reg) and will
     42 // never be emitted by normal code.
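         // For example (illustrative delta): if EmitPatchInfo runs 10 instructions
         // after the patch site label was bound, delta_to_patch_site is 10, so the
         // marker emitted is andi(zero_reg, Register::from_code(10 / kImm16Mask),
         // 10 % kImm16Mask), i.e. "andi zero_reg, zero_reg, 10"; the patcher can
         // recover the delta as reg_code * kImm16Mask + immediate = 0 * 0xffff + 10.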
     43 class JumpPatchSite BASE_EMBEDDED {
     44  public:
     45   explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
     46 #ifdef DEBUG
     47     info_emitted_ = false;
     48 #endif
     49   }
     50 
     51   ~JumpPatchSite() {
     52     DCHECK(patch_site_.is_bound() == info_emitted_);
     53   }
     54 
      55   // When initially emitted, the branch below is always taken, so the inlined
      56   // smi code is always skipped.
     57   void EmitJumpIfNotSmi(Register reg, Label* target) {
     58     DCHECK(!patch_site_.is_bound() && !info_emitted_);
     59     Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
     60     __ bind(&patch_site_);
     61     __ andi(at, reg, 0);
     62     // Always taken before patched.
     63     __ BranchShort(target, eq, at, Operand(zero_reg));
     64   }
     65 
      66   // When initially emitted, the branch below is never taken, so the inlined
      67   // smi code is never skipped.
     68   void EmitJumpIfSmi(Register reg, Label* target) {
     69     Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
     70     DCHECK(!patch_site_.is_bound() && !info_emitted_);
     71     __ bind(&patch_site_);
     72     __ andi(at, reg, 0);
     73     // Never taken before patched.
     74     __ BranchShort(target, ne, at, Operand(zero_reg));
     75   }
     76 
     77   void EmitPatchInfo() {
     78     if (patch_site_.is_bound()) {
     79       int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
     80       Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
     81       __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
     82 #ifdef DEBUG
     83       info_emitted_ = true;
     84 #endif
     85     } else {
     86       __ nop();  // Signals no inlined code.
     87     }
     88   }
     89 
     90  private:
     91   MacroAssembler* masm() { return masm_; }
     92   MacroAssembler* masm_;
     93   Label patch_site_;
     94 #ifdef DEBUG
     95   bool info_emitted_;
     96 #endif
     97 };
     98 
     99 
    100 // Generate code for a JS function.  On entry to the function the receiver
    101 // and arguments have been pushed on the stack left to right.  The actual
    102 // argument count matches the formal parameter count expected by the
    103 // function.
    104 //
    105 // The live registers are:
    106 //   o a1: the JS function object being called (i.e. ourselves)
    107 //   o a3: the new target value
    108 //   o cp: our context
    109 //   o fp: our caller's frame pointer
    110 //   o sp: stack pointer
    111 //   o ra: return address
    112 //
    113 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
    114 // frames-mips.h for its layout.
    115 void FullCodeGenerator::Generate() {
    116   CompilationInfo* info = info_;
    117   profiling_counter_ = isolate()->factory()->NewCell(
    118       Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
    119   SetFunctionPosition(literal());
    120   Comment cmnt(masm_, "[ function compiled by full code generator");
    121 
    122   ProfileEntryHookStub::MaybeCallEntryHook(masm_);
    123 
    124   if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    125     int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    126     __ lw(a2, MemOperand(sp, receiver_offset));
    127     __ AssertNotSmi(a2);
    128     __ GetObjectType(a2, a2, a2);
    129     __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2,
    130              Operand(FIRST_JS_RECEIVER_TYPE));
    131   }
    132 
    133   // Open a frame scope to indicate that there is a frame on the stack.  The
    134   // MANUAL indicates that the scope shouldn't actually generate code to set up
    135   // the frame (that is done below).
    136   FrameScope frame_scope(masm_, StackFrame::MANUAL);
    137 
    138   info->set_prologue_offset(masm_->pc_offset());
    139   __ Prologue(info->GeneratePreagedPrologue());
    140 
    141   // Increment invocation count for the function.
    142   {
    143     Comment cmnt(masm_, "[ Increment invocation count");
    144     __ lw(a0, FieldMemOperand(a1, JSFunction::kFeedbackVectorOffset));
    145     __ lw(a0, FieldMemOperand(a0, Cell::kValueOffset));
    146     __ lw(t0, FieldMemOperand(
    147                   a0, FeedbackVector::kInvocationCountIndex * kPointerSize +
    148                           FeedbackVector::kHeaderSize));
    149     __ Addu(t0, t0, Operand(Smi::FromInt(1)));
    150     __ sw(t0, FieldMemOperand(
    151                   a0, FeedbackVector::kInvocationCountIndex * kPointerSize +
    152                           FeedbackVector::kHeaderSize));
    153   }
    154 
    155   { Comment cmnt(masm_, "[ Allocate locals");
    156     int locals_count = info->scope()->num_stack_slots();
    157     OperandStackDepthIncrement(locals_count);
    158     if (locals_count > 0) {
    159       if (locals_count >= 128) {
    160         Label ok;
    161         __ Subu(t5, sp, Operand(locals_count * kPointerSize));
    162         __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    163         __ Branch(&ok, hs, t5, Operand(a2));
    164         __ CallRuntime(Runtime::kThrowStackOverflow);
    165         __ bind(&ok);
    166       }
    167       __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
    168       int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
    169       if (locals_count >= kMaxPushes) {
    170         int loop_iterations = locals_count / kMaxPushes;
    171         __ li(a2, Operand(loop_iterations));
    172         Label loop_header;
    173         __ bind(&loop_header);
    174         // Do pushes.
    175         __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
    176         for (int i = 0; i < kMaxPushes; i++) {
    177           __ sw(t5, MemOperand(sp, i * kPointerSize));
    178         }
    179         // Continue loop if not done.
    180         __ Subu(a2, a2, Operand(1));
    181         __ Branch(&loop_header, ne, a2, Operand(zero_reg));
    182       }
    183       int remaining = locals_count % kMaxPushes;
    184       // Emit the remaining pushes.
    185       __ Subu(sp, sp, Operand(remaining * kPointerSize));
     186       for (int i = 0; i < remaining; i++) {
    187         __ sw(t5, MemOperand(sp, i * kPointerSize));
    188       }
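               // For example: with 70 locals and kMaxPushes == 32 (the default when not
               // optimizing for size), the loop above runs 70 / 32 == 2 times, filling
               // 64 slots, and this remainder code fills the last 70 % 32 == 6 slots.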
    189     }
    190   }
    191 
    192   bool function_in_register_a1 = true;
    193 
    194   // Possibly allocate a local context.
    195   if (info->scope()->NeedsContext()) {
    196     Comment cmnt(masm_, "[ Allocate context");
    197     // Argument to NewContext is the function, which is still in a1.
    198     bool need_write_barrier = true;
    199     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    200     if (info->scope()->is_script_scope()) {
    201       __ push(a1);
    202       __ Push(info->scope()->scope_info());
    203       __ CallRuntime(Runtime::kNewScriptContext);
    204       PrepareForBailoutForId(BailoutId::ScriptContext(),
    205                              BailoutState::TOS_REGISTER);
     206       // The new target value is not used, so clobbering it is safe.
    207       DCHECK_NULL(info->scope()->new_target_var());
    208     } else {
    209       if (info->scope()->new_target_var() != nullptr) {
    210         __ push(a3);  // Preserve new target.
    211       }
    212       if (slots <=
    213           ConstructorBuiltinsAssembler::MaximumFunctionContextSlots()) {
    214         Callable callable = CodeFactory::FastNewFunctionContext(
    215             isolate(), info->scope()->scope_type());
    216         __ li(FastNewFunctionContextDescriptor::SlotsRegister(),
    217               Operand(slots));
    218         __ Call(callable.code(), RelocInfo::CODE_TARGET);
    219         // Result of the FastNewFunctionContext builtin is always in new space.
    220         need_write_barrier = false;
    221       } else {
    222         __ push(a1);
    223         __ Push(Smi::FromInt(info->scope()->scope_type()));
    224         __ CallRuntime(Runtime::kNewFunctionContext);
    225       }
    226       if (info->scope()->new_target_var() != nullptr) {
    227         __ pop(a3);  // Restore new target.
    228       }
    229     }
    230     function_in_register_a1 = false;
    231     // Context is returned in v0. It replaces the context passed to us.
    232     // It's saved in the stack and kept live in cp.
    233     __ mov(cp, v0);
    234     __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    235     // Copy any necessary parameters into the context.
    236     int num_parameters = info->scope()->num_parameters();
    237     int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    238     for (int i = first_parameter; i < num_parameters; i++) {
    239       Variable* var =
    240           (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
    241       if (var->IsContextSlot()) {
    242         int parameter_offset = StandardFrameConstants::kCallerSPOffset +
    243                                  (num_parameters - 1 - i) * kPointerSize;
    244         // Load parameter from stack.
    245         __ lw(a0, MemOperand(fp, parameter_offset));
    246         // Store it in the context.
    247         MemOperand target = ContextMemOperand(cp, var->index());
    248         __ sw(a0, target);
    249 
    250         // Update the write barrier.
    251         if (need_write_barrier) {
    252           __ RecordWriteContextSlot(cp, target.offset(), a0, a2,
    253                                     kRAHasBeenSaved, kDontSaveFPRegs);
    254         } else if (FLAG_debug_code) {
    255           Label done;
    256           __ JumpIfInNewSpace(cp, a0, &done);
    257           __ Abort(kExpectedNewSpaceObject);
    258           __ bind(&done);
    259         }
    260       }
    261     }
    262   }
    263 
     264   // Registers holding this function and the new target are both trashed if we
     265   // bail out here. But since that can happen only when the new target is not
     266   // used and we allocate a context, |function_in_register_a1| is still correct.
    267   PrepareForBailoutForId(BailoutId::FunctionContext(),
    268                          BailoutState::NO_REGISTERS);
    269 
    270   // We don't support new.target and rest parameters here.
    271   DCHECK_NULL(info->scope()->new_target_var());
    272   DCHECK_NULL(info->scope()->rest_parameter());
    273   DCHECK_NULL(info->scope()->this_function_var());
    274 
    275   Variable* arguments = info->scope()->arguments();
    276   if (arguments != NULL) {
    277     // Function uses arguments object.
    278     Comment cmnt(masm_, "[ Allocate arguments object");
    279     if (!function_in_register_a1) {
    280       // Load this again, if it's used by the local context below.
    281       __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    282     }
    283     if (is_strict(language_mode()) || !has_simple_parameters()) {
    284       Callable callable = CodeFactory::FastNewStrictArguments(isolate());
    285       __ Call(callable.code(), RelocInfo::CODE_TARGET);
    286       RestoreContext();
    287     } else if (literal()->has_duplicate_parameters()) {
    288       __ Push(a1);
    289       __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    290     } else {
    291       Callable callable = CodeFactory::FastNewSloppyArguments(isolate());
    292       __ Call(callable.code(), RelocInfo::CODE_TARGET);
    293       RestoreContext();
    294     }
    295 
    296     SetVar(arguments, v0, a1, a2);
    297   }
    298 
    299   if (FLAG_trace) {
    300     __ CallRuntime(Runtime::kTraceEnter);
    301   }
    302 
    303   // Visit the declarations and body unless there is an illegal
    304   // redeclaration.
    305   PrepareForBailoutForId(BailoutId::FunctionEntry(),
    306                          BailoutState::NO_REGISTERS);
    307   {
    308     Comment cmnt(masm_, "[ Declarations");
    309     VisitDeclarations(scope()->declarations());
    310   }
    311 
    312   // Assert that the declarations do not use ICs. Otherwise the debugger
    313   // won't be able to redirect a PC at an IC to the correct IC in newly
    314   // recompiled code.
    315   DCHECK_EQ(0, ic_total_count_);
    316 
    317   {
    318     Comment cmnt(masm_, "[ Stack check");
    319     PrepareForBailoutForId(BailoutId::Declarations(),
    320                            BailoutState::NO_REGISTERS);
    321     Label ok;
    322     __ LoadRoot(at, Heap::kStackLimitRootIndex);
    323     __ Branch(&ok, hs, sp, Operand(at));
    324     Handle<Code> stack_check = isolate()->builtins()->StackCheck();
    325     PredictableCodeSizeScope predictable(
    326         masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
    327     __ Call(stack_check, RelocInfo::CODE_TARGET);
    328     __ bind(&ok);
    329   }
    330 
    331   {
    332     Comment cmnt(masm_, "[ Body");
    333     DCHECK(loop_depth() == 0);
    334     VisitStatements(literal()->body());
    335     DCHECK(loop_depth() == 0);
    336   }
    337 
    338   // Always emit a 'return undefined' in case control fell off the end of
    339   // the body.
    340   { Comment cmnt(masm_, "[ return <undefined>;");
    341     __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
    342   }
    343   EmitReturnSequence();
    344 }
    345 
    346 
    347 void FullCodeGenerator::ClearAccumulator() {
    348   DCHECK(Smi::kZero == 0);
    349   __ mov(v0, zero_reg);
    350 }
    351 
    352 
    353 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
    354   __ li(a2, Operand(profiling_counter_));
    355   __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
    356   __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
    357   __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
    358 }
    359 
    360 
    361 void FullCodeGenerator::EmitProfilingCounterReset() {
    362   int reset_value = FLAG_interrupt_budget;
    363   if (info_->is_debug()) {
    364     // Detect debug break requests as soon as possible.
    365     reset_value = FLAG_interrupt_budget >> 4;
    366   }
    367   __ li(a2, Operand(profiling_counter_));
    368   __ li(a3, Operand(Smi::FromInt(reset_value)));
    369   __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
    370 }
    371 
    372 
    373 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
    374                                                 Label* back_edge_target) {
     375   // The generated code is used by Deoptimizer::PatchStackCheckCodeAt, so we
     376   // need to make sure its size stays constant. Branch may emit a skip-or-jump
     377   // sequence instead of the normal Branch. It seems that the "skip" part of
     378   // that sequence is about as long as this Branch would be, so it is safe to
     379   // ignore that.
    380   Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    381   Comment cmnt(masm_, "[ Back edge bookkeeping");
    382   Label ok;
    383   DCHECK(back_edge_target->is_bound());
    384   int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
    385   int weight = Min(kMaxBackEdgeWeight,
    386                    Max(1, distance / kCodeSizeMultiplier));
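           // For example: a back edge kCodeSizeMultiplier * 3 bytes behind the current
           // pc gives a weight of 3 (clamped to [1, kMaxBackEdgeWeight]), so larger
           // loop bodies drain the profiling counter faster.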
    387   EmitProfilingCounterDecrement(weight);
    388   __ slt(at, a3, zero_reg);
    389   __ beq(at, zero_reg, &ok);
    390   // Call will emit a li t9 first, so it is safe to use the delay slot.
    391   __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
    392   // Record a mapping of this PC offset to the OSR id.  This is used to find
    393   // the AST id from the unoptimized code in order to use it as a key into
    394   // the deoptimization input data found in the optimized code.
    395   RecordBackEdge(stmt->OsrEntryId());
    396   EmitProfilingCounterReset();
    397 
    398   __ bind(&ok);
    399   PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
    400   // Record a mapping of the OSR id to this PC.  This is used if the OSR
    401   // entry becomes the target of a bailout.  We don't expect it to be, but
    402   // we want it to work if it is.
    403   PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
    404 }
    405 
    406 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    407     bool is_tail_call) {
    408   // Pretend that the exit is a backwards jump to the entry.
    409   int weight = 1;
    410   if (info_->ShouldSelfOptimize()) {
    411     weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    412   } else {
    413     int distance = masm_->pc_offset();
    414     weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
    415   }
    416   EmitProfilingCounterDecrement(weight);
    417   Label ok;
    418   __ Branch(&ok, ge, a3, Operand(zero_reg));
    419   // Don't need to save result register if we are going to do a tail call.
    420   if (!is_tail_call) {
    421     __ push(v0);
    422   }
    423   __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
    424   if (!is_tail_call) {
    425     __ pop(v0);
    426   }
    427   EmitProfilingCounterReset();
    428   __ bind(&ok);
    429 }
    430 
    431 void FullCodeGenerator::EmitReturnSequence() {
    432   Comment cmnt(masm_, "[ Return sequence");
    433   if (return_label_.is_bound()) {
    434     __ Branch(&return_label_);
    435   } else {
    436     __ bind(&return_label_);
    437     if (FLAG_trace) {
    438       // Push the return value on the stack as the parameter.
    439       // Runtime::TraceExit returns its parameter in v0.
    440       __ push(v0);
    441       __ CallRuntime(Runtime::kTraceExit);
    442     }
    443     EmitProfilingCounterHandlingForReturnSequence(false);
    444 
     445     // Make sure that the trampoline pool is not emitted inside of the return
     446     // sequence.
    447     { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    448       int32_t arg_count = info_->scope()->num_parameters() + 1;
    449       int32_t sp_delta = arg_count * kPointerSize;
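               // For example: with 2 declared parameters, arg_count is 3 (the receiver
               // is included), so sp_delta is 3 * kPointerSize == 12 bytes on 32-bit MIPS.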
    450       SetReturnPosition(literal());
    451       __ mov(sp, fp);
    452       __ MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
    453       __ Addu(sp, sp, Operand(sp_delta));
    454       __ Jump(ra);
    455     }
    456   }
    457 }
    458 
    459 void FullCodeGenerator::RestoreContext() {
    460   __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    461 }
    462 
    463 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
    464   DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    465   codegen()->GetVar(result_register(), var);
    466   codegen()->PushOperand(result_register());
    467 }
    468 
    469 
    470 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
    471 }
    472 
    473 
    474 void FullCodeGenerator::AccumulatorValueContext::Plug(
    475     Heap::RootListIndex index) const {
    476   __ LoadRoot(result_register(), index);
    477 }
    478 
    479 
    480 void FullCodeGenerator::StackValueContext::Plug(
    481     Heap::RootListIndex index) const {
    482   __ LoadRoot(result_register(), index);
    483   codegen()->PushOperand(result_register());
    484 }
    485 
    486 
    487 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
    488   codegen()->PrepareForBailoutBeforeSplit(condition(),
    489                                           true,
    490                                           true_label_,
    491                                           false_label_);
    492   if (index == Heap::kUndefinedValueRootIndex ||
    493       index == Heap::kNullValueRootIndex ||
    494       index == Heap::kFalseValueRootIndex) {
    495     if (false_label_ != fall_through_) __ Branch(false_label_);
    496   } else if (index == Heap::kTrueValueRootIndex) {
    497     if (true_label_ != fall_through_) __ Branch(true_label_);
    498   } else {
    499     __ LoadRoot(result_register(), index);
    500     codegen()->DoTest(this);
    501   }
    502 }
    503 
    504 
    505 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
    506 }
    507 
    508 
    509 void FullCodeGenerator::AccumulatorValueContext::Plug(
    510     Handle<Object> lit) const {
    511   __ li(result_register(), Operand(lit));
    512 }
    513 
    514 
    515 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
    516   // Immediates cannot be pushed directly.
    517   __ li(result_register(), Operand(lit));
    518   codegen()->PushOperand(result_register());
    519 }
    520 
    521 
    522 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
    523   codegen()->PrepareForBailoutBeforeSplit(condition(),
    524                                           true,
    525                                           true_label_,
    526                                           false_label_);
    527   DCHECK(lit->IsNullOrUndefined(isolate()) || !lit->IsUndetectable());
    528   if (lit->IsNullOrUndefined(isolate()) || lit->IsFalse(isolate())) {
    529     if (false_label_ != fall_through_) __ Branch(false_label_);
    530   } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    531     if (true_label_ != fall_through_) __ Branch(true_label_);
    532   } else if (lit->IsString()) {
    533     if (String::cast(*lit)->length() == 0) {
    534       if (false_label_ != fall_through_) __ Branch(false_label_);
    535     } else {
    536       if (true_label_ != fall_through_) __ Branch(true_label_);
    537     }
    538   } else if (lit->IsSmi()) {
    539     if (Smi::cast(*lit)->value() == 0) {
    540       if (false_label_ != fall_through_) __ Branch(false_label_);
    541     } else {
    542       if (true_label_ != fall_through_) __ Branch(true_label_);
    543     }
    544   } else {
    545     // For simplicity we always test the accumulator register.
    546     __ li(result_register(), Operand(lit));
    547     codegen()->DoTest(this);
    548   }
    549 }
    550 
    551 
    552 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
    553                                                        Register reg) const {
    554   DCHECK(count > 0);
    555   if (count > 1) codegen()->DropOperands(count - 1);
    556   __ sw(reg, MemOperand(sp, 0));
    557 }
    558 
    559 
    560 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
    561                                             Label* materialize_false) const {
    562   DCHECK(materialize_true == materialize_false);
    563   __ bind(materialize_true);
    564 }
    565 
    566 
    567 void FullCodeGenerator::AccumulatorValueContext::Plug(
    568     Label* materialize_true,
    569     Label* materialize_false) const {
    570   Label done;
    571   __ bind(materialize_true);
    572   __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
    573   __ Branch(&done);
    574   __ bind(materialize_false);
    575   __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
    576   __ bind(&done);
    577 }
    578 
    579 
    580 void FullCodeGenerator::StackValueContext::Plug(
    581     Label* materialize_true,
    582     Label* materialize_false) const {
    583   codegen()->OperandStackDepthIncrement(1);
    584   Label done;
    585   __ bind(materialize_true);
    586   __ LoadRoot(at, Heap::kTrueValueRootIndex);
     587   // Push the value, as the following branch can clobber 'at' in long branch mode.
    588   __ push(at);
    589   __ Branch(&done);
    590   __ bind(materialize_false);
    591   __ LoadRoot(at, Heap::kFalseValueRootIndex);
    592   __ push(at);
    593   __ bind(&done);
    594 }
    595 
    596 
    597 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
    598                                           Label* materialize_false) const {
    599   DCHECK(materialize_true == true_label_);
    600   DCHECK(materialize_false == false_label_);
    601 }
    602 
    603 
    604 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
    605   Heap::RootListIndex value_root_index =
    606       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
    607   __ LoadRoot(result_register(), value_root_index);
    608 }
    609 
    610 
    611 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
    612   Heap::RootListIndex value_root_index =
    613       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
    614   __ LoadRoot(at, value_root_index);
    615   codegen()->PushOperand(at);
    616 }
    617 
    618 
    619 void FullCodeGenerator::TestContext::Plug(bool flag) const {
    620   codegen()->PrepareForBailoutBeforeSplit(condition(),
    621                                           true,
    622                                           true_label_,
    623                                           false_label_);
    624   if (flag) {
    625     if (true_label_ != fall_through_) __ Branch(true_label_);
    626   } else {
    627     if (false_label_ != fall_through_) __ Branch(false_label_);
    628   }
    629 }
    630 
    631 
    632 void FullCodeGenerator::DoTest(Expression* condition,
    633                                Label* if_true,
    634                                Label* if_false,
    635                                Label* fall_through) {
    636   __ mov(a0, result_register());
    637   Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
    638   CallIC(ic, condition->test_id());
    639   __ LoadRoot(at, Heap::kTrueValueRootIndex);
    640   Split(eq, result_register(), Operand(at), if_true, if_false, fall_through);
    641 }
    642 
    643 
    644 void FullCodeGenerator::Split(Condition cc,
    645                               Register lhs,
    646                               const Operand&  rhs,
    647                               Label* if_true,
    648                               Label* if_false,
    649                               Label* fall_through) {
    650   if (if_false == fall_through) {
    651     __ Branch(if_true, cc, lhs, rhs);
    652   } else if (if_true == fall_through) {
    653     __ Branch(if_false, NegateCondition(cc), lhs, rhs);
    654   } else {
    655     __ Branch(if_true, cc, lhs, rhs);
    656     __ Branch(if_false);
    657   }
    658 }
    659 
    660 
    661 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
    662   DCHECK(var->IsStackAllocated());
    663   // Offset is negative because higher indexes are at lower addresses.
    664   int offset = -var->index() * kPointerSize;
    665   // Adjust by a (parameter or local) base offset.
    666   if (var->IsParameter()) {
    667     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
    668   } else {
    669     offset += JavaScriptFrameConstants::kLocal0Offset;
    670   }
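           // For example: parameter 0 of an n-parameter function resolves to
           // fp + (n + 1) * kPointerSize (just below the receiver in the caller's
           // frame), while local i resolves to fp + kLocal0Offset - i * kPointerSize.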
    671   return MemOperand(fp, offset);
    672 }
    673 
    674 
    675 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
    676   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
    677   if (var->IsContextSlot()) {
    678     int context_chain_length = scope()->ContextChainLength(var->scope());
    679     __ LoadContext(scratch, context_chain_length);
    680     return ContextMemOperand(scratch, var->index());
    681   } else {
    682     return StackOperand(var);
    683   }
    684 }
    685 
    686 
    687 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
    688   // Use destination as scratch.
    689   MemOperand location = VarOperand(var, dest);
    690   __ lw(dest, location);
    691 }
    692 
    693 
    694 void FullCodeGenerator::SetVar(Variable* var,
    695                                Register src,
    696                                Register scratch0,
    697                                Register scratch1) {
    698   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
    699   DCHECK(!scratch0.is(src));
    700   DCHECK(!scratch0.is(scratch1));
    701   DCHECK(!scratch1.is(src));
    702   MemOperand location = VarOperand(var, scratch0);
    703   __ sw(src, location);
    704   // Emit the write barrier code if the location is in the heap.
    705   if (var->IsContextSlot()) {
    706     __ RecordWriteContextSlot(scratch0,
    707                               location.offset(),
    708                               src,
    709                               scratch1,
    710                               kRAHasBeenSaved,
    711                               kDontSaveFPRegs);
    712   }
    713 }
    714 
    715 
    716 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
    717                                                      bool should_normalize,
    718                                                      Label* if_true,
    719                                                      Label* if_false) {
    720   // Only prepare for bailouts before splits if we're in a test
    721   // context. Otherwise, we let the Visit function deal with the
    722   // preparation to avoid preparing with the same AST id twice.
    723   if (!context()->IsTest()) return;
    724 
    725   Label skip;
    726   if (should_normalize) __ Branch(&skip);
    727   PrepareForBailout(expr, BailoutState::TOS_REGISTER);
    728   if (should_normalize) {
    729     __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    730     Split(eq, v0, Operand(t0), if_true, if_false, NULL);
    731     __ bind(&skip);
    732   }
    733 }
    734 
    735 
    736 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
    737   // The variable in the declaration always resides in the current function
    738   // context.
    739   DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
    740   if (FLAG_debug_code) {
    741     // Check that we're not inside a with or catch context.
    742     __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    743     __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    744     __ Check(ne, kDeclarationInWithContext,
    745         a1, Operand(t0));
    746     __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    747     __ Check(ne, kDeclarationInCatchContext,
    748         a1, Operand(t0));
    749   }
    750 }
    751 
    752 
    753 void FullCodeGenerator::VisitVariableDeclaration(
    754     VariableDeclaration* declaration) {
    755   VariableProxy* proxy = declaration->proxy();
    756   Variable* variable = proxy->var();
    757   switch (variable->location()) {
    758     case VariableLocation::UNALLOCATED: {
    759       DCHECK(!variable->binding_needs_init());
    760       globals_->Add(variable->name(), zone());
    761       FeedbackSlot slot = proxy->VariableFeedbackSlot();
    762       DCHECK(!slot.IsInvalid());
    763       globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
    764       globals_->Add(isolate()->factory()->undefined_value(), zone());
    765       globals_->Add(isolate()->factory()->undefined_value(), zone());
    766       break;
    767     }
    768     case VariableLocation::PARAMETER:
    769     case VariableLocation::LOCAL:
    770       if (variable->binding_needs_init()) {
    771         Comment cmnt(masm_, "[ VariableDeclaration");
    772         __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
    773         __ sw(t0, StackOperand(variable));
    774       }
    775       break;
    776 
    777     case VariableLocation::CONTEXT:
    778       if (variable->binding_needs_init()) {
    779         Comment cmnt(masm_, "[ VariableDeclaration");
    780         EmitDebugCheckDeclarationContext(variable);
    781           __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
     782         __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
     783         __ sw(at, ContextMemOperand(cp, variable->index()));
     784         // No write barrier since the_hole_value is in old space.
     785         PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
    786       break;
    787 
    788     case VariableLocation::LOOKUP:
    789     case VariableLocation::MODULE:
    790       UNREACHABLE();
    791   }
    792 }
    793 
    794 
    795 void FullCodeGenerator::VisitFunctionDeclaration(
    796     FunctionDeclaration* declaration) {
    797   VariableProxy* proxy = declaration->proxy();
    798   Variable* variable = proxy->var();
    799   switch (variable->location()) {
    800     case VariableLocation::UNALLOCATED: {
    801       globals_->Add(variable->name(), zone());
    802       FeedbackSlot slot = proxy->VariableFeedbackSlot();
    803       DCHECK(!slot.IsInvalid());
    804       globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
    805 
    806       // We need the slot where the literals array lives, too.
    807       slot = declaration->fun()->LiteralFeedbackSlot();
    808       DCHECK(!slot.IsInvalid());
    809       globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
    810 
    811       Handle<SharedFunctionInfo> function =
    812           Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
    813       // Check for stack-overflow exception.
    814       if (function.is_null()) return SetStackOverflow();
    815       globals_->Add(function, zone());
    816       break;
    817     }
    818 
    819     case VariableLocation::PARAMETER:
    820     case VariableLocation::LOCAL: {
    821       Comment cmnt(masm_, "[ FunctionDeclaration");
    822       VisitForAccumulatorValue(declaration->fun());
    823       __ sw(result_register(), StackOperand(variable));
    824       break;
    825     }
    826 
    827     case VariableLocation::CONTEXT: {
    828       Comment cmnt(masm_, "[ FunctionDeclaration");
    829       EmitDebugCheckDeclarationContext(variable);
    830       VisitForAccumulatorValue(declaration->fun());
    831       __ sw(result_register(), ContextMemOperand(cp, variable->index()));
    832       int offset = Context::SlotOffset(variable->index());
    833       // We know that we have written a function, which is not a smi.
    834       __ RecordWriteContextSlot(cp,
    835                                 offset,
    836                                 result_register(),
    837                                 a2,
    838                                 kRAHasBeenSaved,
    839                                 kDontSaveFPRegs,
    840                                 EMIT_REMEMBERED_SET,
    841                                 OMIT_SMI_CHECK);
    842       PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
    843       break;
    844     }
    845 
    846     case VariableLocation::LOOKUP:
    847     case VariableLocation::MODULE:
    848       UNREACHABLE();
    849   }
    850 }
    851 
    852 
    853 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
    854   // Call the runtime to declare the globals.
    855   __ li(a1, Operand(pairs));
    856   __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
    857   __ EmitLoadFeedbackVector(a2);
    858   __ Push(a1, a0, a2);
    859   __ CallRuntime(Runtime::kDeclareGlobals);
    860   // Return value is ignored.
    861 }
    862 
    863 
    864 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
    865   Comment cmnt(masm_, "[ SwitchStatement");
    866   Breakable nested_statement(this, stmt);
    867   SetStatementPosition(stmt);
    868 
    869   // Keep the switch value on the stack until a case matches.
    870   VisitForStackValue(stmt->tag());
    871   PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
    872 
    873   ZoneList<CaseClause*>* clauses = stmt->cases();
    874   CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
    875 
    876   Label next_test;  // Recycled for each test.
    877   // Compile all the tests with branches to their bodies.
    878   for (int i = 0; i < clauses->length(); i++) {
    879     CaseClause* clause = clauses->at(i);
    880     clause->body_target()->Unuse();
    881 
    882     // The default is not a test, but remember it as final fall through.
    883     if (clause->is_default()) {
    884       default_clause = clause;
    885       continue;
    886     }
    887 
    888     Comment cmnt(masm_, "[ Case comparison");
    889     __ bind(&next_test);
    890     next_test.Unuse();
    891 
    892     // Compile the label expression.
    893     VisitForAccumulatorValue(clause->label());
    894     __ mov(a0, result_register());  // CompareStub requires args in a0, a1.
    895 
    896     // Perform the comparison as if via '==='.
    897     __ lw(a1, MemOperand(sp, 0));  // Switch value.
    898     bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    899     JumpPatchSite patch_site(masm_);
    900     if (inline_smi_code) {
    901       Label slow_case;
    902       __ or_(a2, a1, a0);
    903       patch_site.EmitJumpIfNotSmi(a2, &slow_case);
    904 
    905       __ Branch(&next_test, ne, a1, Operand(a0));
    906       __ Drop(1);  // Switch value is no longer needed.
    907       __ Branch(clause->body_target());
    908 
    909       __ bind(&slow_case);
    910     }
    911 
    912     // Record position before stub call for type feedback.
    913     SetExpressionPosition(clause);
    914     Handle<Code> ic =
    915         CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    916     CallIC(ic, clause->CompareId());
    917     patch_site.EmitPatchInfo();
    918 
    919     Label skip;
    920     __ Branch(&skip);
    921     PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    922     __ LoadRoot(at, Heap::kTrueValueRootIndex);
    923     __ Branch(&next_test, ne, v0, Operand(at));
    924     __ Drop(1);
    925     __ Branch(clause->body_target());
    926     __ bind(&skip);
    927 
    928     __ Branch(&next_test, ne, v0, Operand(zero_reg));
    929     __ Drop(1);  // Switch value is no longer needed.
    930     __ Branch(clause->body_target());
    931   }
    932 
    933   // Discard the test value and jump to the default if present, otherwise to
    934   // the end of the statement.
    935   __ bind(&next_test);
    936   DropOperands(1);  // Switch value is no longer needed.
    937   if (default_clause == NULL) {
    938     __ Branch(nested_statement.break_label());
    939   } else {
    940     __ Branch(default_clause->body_target());
    941   }
    942 
    943   // Compile all the case bodies.
    944   for (int i = 0; i < clauses->length(); i++) {
    945     Comment cmnt(masm_, "[ Case body");
    946     CaseClause* clause = clauses->at(i);
    947     __ bind(clause->body_target());
    948     PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    949     VisitStatements(clause->statements());
    950   }
    951 
    952   __ bind(nested_statement.break_label());
    953   PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
    954 }
    955 
    956 
    957 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
    958   Comment cmnt(masm_, "[ ForInStatement");
    959   SetStatementPosition(stmt, SKIP_BREAK);
    960 
    961   FeedbackSlot slot = stmt->ForInFeedbackSlot();
    962 
    963   // Get the object to enumerate over.
    964   SetExpressionAsStatementPosition(stmt->enumerable());
    965   VisitForAccumulatorValue(stmt->enumerable());
    966   __ mov(a0, result_register());
    967   OperandStackDepthIncrement(5);
    968 
    969   Label loop, exit;
    970   Iteration loop_statement(this, stmt);
    971   increment_loop_depth();
    972 
    973   // If the object is null or undefined, skip over the loop, otherwise convert
    974   // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
    975   Label convert, done_convert;
    976   __ JumpIfSmi(a0, &convert);
    977   __ GetObjectType(a0, a1, a1);
    978   __ Branch(USE_DELAY_SLOT, &done_convert, ge, a1,
    979             Operand(FIRST_JS_RECEIVER_TYPE));
    980   __ LoadRoot(at, Heap::kNullValueRootIndex);  // In delay slot.
    981   __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at));
    982   __ LoadRoot(at, Heap::kUndefinedValueRootIndex);  // In delay slot.
    983   __ Branch(&exit, eq, a0, Operand(at));
    984   __ bind(&convert);
    985   __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
    986   RestoreContext();
    987   __ mov(a0, v0);
    988   __ bind(&done_convert);
    989   PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
    990   __ push(a0);
    991 
    992   // Check cache validity in generated code. If we cannot guarantee cache
    993   // validity, call the runtime system to check cache validity or get the
    994   // property names in a fixed array. Note: Proxies never have an enum cache,
    995   // so will always take the slow path.
    996   Label call_runtime;
    997   __ CheckEnumCache(&call_runtime);
    998 
    999   // The enum cache is valid.  Load the map of the object being
   1000   // iterated over and use the cache for the iteration.
   1001   Label use_cache;
   1002   __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
   1003   __ Branch(&use_cache);
   1004 
   1005   // Get the set of properties to enumerate.
   1006   __ bind(&call_runtime);
   1007   __ push(a0);  // Duplicate the enumerable object on the stack.
   1008   __ CallRuntime(Runtime::kForInEnumerate);
   1009   PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
   1010 
   1011   // If we got a map from the runtime call, we can do a fast
   1012   // modification check. Otherwise, we got a fixed array, and we have
   1013   // to do a slow check.
   1014   Label fixed_array;
   1015   __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
   1016   __ LoadRoot(at, Heap::kMetaMapRootIndex);
   1017   __ Branch(&fixed_array, ne, a2, Operand(at));
   1018 
   1019   // We got a map in register v0. Get the enumeration cache from it.
   1020   Label no_descriptors;
   1021   __ bind(&use_cache);
   1022 
   1023   __ EnumLength(a1, v0);
   1024   __ Branch(&no_descriptors, eq, a1, Operand(Smi::kZero));
   1025 
   1026   __ LoadInstanceDescriptors(v0, a2);
   1027   __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
   1028   __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
   1029 
   1030   // Set up the four remaining stack slots.
   1031   __ li(a0, Operand(Smi::kZero));
   1032   // Push map, enumeration cache, enumeration cache length (as smi) and zero.
   1033   __ Push(v0, a2, a1, a0);
   1034   __ jmp(&loop);
   1035 
   1036   __ bind(&no_descriptors);
   1037   __ Drop(1);
   1038   __ jmp(&exit);
   1039 
   1040   // We got a fixed array in register v0. Iterate through that.
   1041   __ bind(&fixed_array);
   1042 
   1043   __ li(a1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check
   1044   __ Push(a1, v0);  // Smi and array
   1045   __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
   1046   __ Push(a1);  // Fixed array length (as smi).
   1047   PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
   1048   __ li(a0, Operand(Smi::kZero));
   1049   __ Push(a0);  // Initial index.
   1050 
   1051   // Generate code for doing the condition check.
   1052   __ bind(&loop);
   1053   SetExpressionAsStatementPosition(stmt->each());
   1054 
   1055   // Load the current count to a0, load the length to a1.
   1056   __ lw(a0, MemOperand(sp, 0 * kPointerSize));
   1057   __ lw(a1, MemOperand(sp, 1 * kPointerSize));
   1058   __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
   1059 
   1060   // Get the current entry of the array into result_register.
   1061   __ lw(a2, MemOperand(sp, 2 * kPointerSize));
   1062   __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   1063   __ Lsa(t0, a2, a0, kPointerSizeLog2 - kSmiTagSize);
   1064   __ lw(result_register(), MemOperand(t0));  // Current entry.
   1065 
    1066   // Get the expected map from the stack, or a smi (in the permanent slow
    1067   // case), into register a2.
   1068   __ lw(a2, MemOperand(sp, 3 * kPointerSize));
   1069 
   1070   // Check if the expected map still matches that of the enumerable.
   1071   // If not, we may have to filter the key.
   1072   Label update_each;
   1073   __ lw(a1, MemOperand(sp, 4 * kPointerSize));
   1074   __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
   1075   __ Branch(&update_each, eq, t0, Operand(a2));
   1076 
   1077   // We need to filter the key, record slow-path here.
   1078   int const vector_index = SmiFromSlot(slot)->value();
   1079   __ EmitLoadFeedbackVector(a3);
   1080   __ li(a2, Operand(FeedbackVector::MegamorphicSentinel(isolate())));
   1081   __ sw(a2, FieldMemOperand(a3, FixedArray::OffsetOfElementAt(vector_index)));
   1082 
   1083   __ mov(a0, result_register());
    1084   // a0 contains the key. The receiver in a1 is the second argument to
    1085   // ForInFilter. ForInFilter returns undefined if the receiver doesn't have
    1086   // the key, otherwise it returns the name-converted key.
   1087   __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
   1088   RestoreContext();
   1089   PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
   1090   __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
   1091   __ Branch(loop_statement.continue_label(), eq, result_register(),
   1092             Operand(at));
   1093 
   1094   // Update the 'each' property or variable from the possibly filtered
   1095   // entry in the result_register.
   1096   __ bind(&update_each);
   1097   // Perform the assignment as if via '='.
   1098   { EffectContext context(this);
   1099     EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
   1100     PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
   1101   }
   1102 
   1103   // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
   1104   PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
   1105   // Generate code for the body of the loop.
   1106   Visit(stmt->body());
   1107 
    1108   // Generate code for going to the next element by incrementing the index
    1109   // (smi) stored on top of the stack.
   1110   __ bind(loop_statement.continue_label());
   1111   PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
   1112   __ pop(a0);
   1113   __ Addu(a0, a0, Operand(Smi::FromInt(1)));
   1114   __ push(a0);
   1115 
   1116   EmitBackEdgeBookkeeping(stmt, &loop);
   1117   __ Branch(&loop);
   1118 
   1119   // Remove the pointers stored on the stack.
   1120   __ bind(loop_statement.break_label());
   1121   DropOperands(5);
   1122 
   1123   // Exit and decrement the loop depth.
   1124   PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
   1125   __ bind(&exit);
   1126   decrement_loop_depth();
   1127 }
   1128 
   1129 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
   1130                                           FeedbackSlot slot) {
   1131   DCHECK(NeedsHomeObject(initializer));
   1132   __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
   1133   __ lw(StoreDescriptor::ValueRegister(),
   1134         MemOperand(sp, offset * kPointerSize));
   1135   CallStoreIC(slot, isolate()->factory()->home_object_symbol());
   1136 }
   1137 
   1138 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
   1139                                                      int offset,
   1140                                                      FeedbackSlot slot) {
   1141   DCHECK(NeedsHomeObject(initializer));
   1142   __ Move(StoreDescriptor::ReceiverRegister(), v0);
   1143   __ lw(StoreDescriptor::ValueRegister(),
   1144         MemOperand(sp, offset * kPointerSize));
   1145   CallStoreIC(slot, isolate()->factory()->home_object_symbol());
   1146 }
   1147 
   1148 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
   1149                                          TypeofMode typeof_mode) {
   1150   // Record position before possible IC call.
   1151   SetExpressionPosition(proxy);
   1152   PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
   1153   Variable* var = proxy->var();
   1154 
   1155   // Two cases: global variables and all other types of variables.
   1156   switch (var->location()) {
   1157     case VariableLocation::UNALLOCATED: {
   1158       Comment cmnt(masm_, "[ Global variable");
   1159       EmitGlobalVariableLoad(proxy, typeof_mode);
   1160       context()->Plug(v0);
   1161       break;
   1162     }
   1163 
   1164     case VariableLocation::PARAMETER:
   1165     case VariableLocation::LOCAL:
   1166     case VariableLocation::CONTEXT: {
   1167       DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
   1168       Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
   1169                                                : "[ Stack variable");
   1170       if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
   1171         // Throw a reference error when using an uninitialized let/const
   1172         // binding in harmony mode.
   1173         Label done;
   1174         GetVar(v0, var);
   1175         __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
   1176         __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
   1177         __ Branch(&done, ne, at, Operand(zero_reg));
   1178         __ li(a0, Operand(var->name()));
   1179         __ push(a0);
   1180         __ CallRuntime(Runtime::kThrowReferenceError);
   1181         __ bind(&done);
   1182         context()->Plug(v0);
   1183         break;
   1184       }
   1185       context()->Plug(var);
   1186       break;
   1187     }
   1188 
   1189     case VariableLocation::LOOKUP:
   1190     case VariableLocation::MODULE:
   1191       UNREACHABLE();
   1192   }
   1193 }
   1194 
   1195 
   1196 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
   1197   Expression* expression = (property == NULL) ? NULL : property->value();
   1198   if (expression == NULL) {
   1199     __ LoadRoot(a1, Heap::kNullValueRootIndex);
   1200     PushOperand(a1);
   1201   } else {
   1202     VisitForStackValue(expression);
   1203     if (NeedsHomeObject(expression)) {
   1204       DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
   1205              property->kind() == ObjectLiteral::Property::SETTER);
   1206       int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
   1207       EmitSetHomeObject(expression, offset, property->GetSlot());
   1208     }
   1209   }
   1210 }
   1211 
   1212 
   1213 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   1214   Comment cmnt(masm_, "[ ObjectLiteral");
   1215 
   1216   Handle<BoilerplateDescription> constant_properties =
   1217       expr->GetOrBuildConstantProperties(isolate());
   1218   __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1219   __ li(a2, Operand(SmiFromSlot(expr->literal_slot())));
   1220   __ li(a1, Operand(constant_properties));
   1221   __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
   1222   if (MustCreateObjectLiteralWithRuntime(expr)) {
   1223     __ Push(a3, a2, a1, a0);
   1224     __ CallRuntime(Runtime::kCreateObjectLiteral);
   1225   } else {
   1226     Callable callable = CodeFactory::FastCloneShallowObject(
   1227         isolate(), expr->properties_count());
   1228     __ Call(callable.code(), RelocInfo::CODE_TARGET);
   1229     RestoreContext();
   1230   }
   1231   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
   1232 
   1233   // If result_saved is true the result is on top of the stack.  If
   1234   // result_saved is false the result is in v0.
   1235   bool result_saved = false;
   1236 
   1237   AccessorTable accessor_table(zone());
   1238   for (int i = 0; i < expr->properties()->length(); i++) {
   1239     ObjectLiteral::Property* property = expr->properties()->at(i);
   1240     DCHECK(!property->is_computed_name());
   1241     if (property->IsCompileTimeValue()) continue;
   1242 
   1243     Literal* key = property->key()->AsLiteral();
   1244     Expression* value = property->value();
   1245     if (!result_saved) {
   1246       PushOperand(v0);  // Save result on stack.
   1247       result_saved = true;
   1248     }
   1249     switch (property->kind()) {
   1250       case ObjectLiteral::Property::SPREAD:
   1251       case ObjectLiteral::Property::CONSTANT:
   1252         UNREACHABLE();
   1253       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
   1254         DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
   1255         // Fall through.
   1256       case ObjectLiteral::Property::COMPUTED:
   1257         // It is safe to use [[Put]] here because the boilerplate already
   1258         // contains computed properties with an uninitialized value.
   1259         if (key->IsStringLiteral()) {
   1260           DCHECK(key->IsPropertyName());
   1261           if (property->emit_store()) {
   1262             VisitForAccumulatorValue(value);
   1263             __ mov(StoreDescriptor::ValueRegister(), result_register());
   1264             DCHECK(StoreDescriptor::ValueRegister().is(a0));
   1265             __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
   1266             CallStoreIC(property->GetSlot(0), key->value(), true);
   1267             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
   1268 
   1269             if (NeedsHomeObject(value)) {
   1270               EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
   1271             }
   1272           } else {
   1273             VisitForEffect(value);
   1274           }
   1275           break;
   1276         }
   1277         // Duplicate receiver on stack.
   1278         __ lw(a0, MemOperand(sp));
   1279         PushOperand(a0);
   1280         VisitForStackValue(key);
   1281         VisitForStackValue(value);
   1282         if (property->emit_store()) {
   1283           if (NeedsHomeObject(value)) {
   1284             EmitSetHomeObject(value, 2, property->GetSlot());
   1285           }
   1286           __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
   1287           PushOperand(a0);
   1288           CallRuntimeWithOperands(Runtime::kSetProperty);
   1289         } else {
   1290           DropOperands(3);
   1291         }
   1292         break;
   1293       case ObjectLiteral::Property::PROTOTYPE:
   1294         // Duplicate receiver on stack.
   1295         __ lw(a0, MemOperand(sp));
   1296         PushOperand(a0);
   1297         VisitForStackValue(value);
   1298         DCHECK(property->emit_store());
   1299         CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
   1300         PrepareForBailoutForId(expr->GetIdForPropertySet(i),
   1301                                BailoutState::NO_REGISTERS);
   1302         break;
   1303       case ObjectLiteral::Property::GETTER:
   1304         if (property->emit_store()) {
   1305           AccessorTable::Iterator it = accessor_table.lookup(key);
   1306           it->second->bailout_id = expr->GetIdForPropertySet(i);
   1307           it->second->getter = property;
   1308         }
   1309         break;
   1310       case ObjectLiteral::Property::SETTER:
   1311         if (property->emit_store()) {
   1312           AccessorTable::Iterator it = accessor_table.lookup(key);
   1313           it->second->bailout_id = expr->GetIdForPropertySet(i);
   1314           it->second->setter = property;
   1315         }
   1316         break;
   1317     }
   1318   }
   1319 
   1320   // Emit code to define accessors, using only a single call to the runtime for
   1321   // each pair of corresponding getters and setters.
   1322   for (AccessorTable::Iterator it = accessor_table.begin();
   1323        it != accessor_table.end();
   1324        ++it) {
   1325     __ lw(a0, MemOperand(sp));  // Duplicate receiver.
   1326     PushOperand(a0);
   1327     VisitForStackValue(it->first);
   1328     EmitAccessor(it->second->getter);
   1329     EmitAccessor(it->second->setter);
   1330     __ li(a0, Operand(Smi::FromInt(NONE)));
   1331     PushOperand(a0);
   1332     CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
   1333     PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
   1334   }
   1335 
   1336   if (result_saved) {
   1337     context()->PlugTOS();
   1338   } else {
   1339     context()->Plug(v0);
   1340   }
   1341 }
   1342 
   1343 
   1344 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   1345   Comment cmnt(masm_, "[ ArrayLiteral");
   1346 
   1347   Handle<ConstantElementsPair> constant_elements =
   1348       expr->GetOrBuildConstantElements(isolate());
   1349 
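           // Arguments for the clone/create call below: closure in a3, literal
           // slot in a2 and constant elements in a1; the flags (a0) are only
           // needed on the runtime path.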
   1350   __ mov(a0, result_register());
   1351   __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1352   __ li(a2, Operand(SmiFromSlot(expr->literal_slot())));
   1353   __ li(a1, Operand(constant_elements));
   1354   if (MustCreateArrayLiteralWithRuntime(expr)) {
   1355     __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
   1356     __ Push(a3, a2, a1, a0);
   1357     __ CallRuntime(Runtime::kCreateArrayLiteral);
   1358   } else {
   1359     Callable callable =
   1360         CodeFactory::FastCloneShallowArray(isolate(), TRACK_ALLOCATION_SITE);
   1361     __ Call(callable.code(), RelocInfo::CODE_TARGET);
   1362     RestoreContext();
   1363   }
   1364   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
   1365 
   1366   bool result_saved = false;  // Is the result saved to the stack?
   1367   ZoneList<Expression*>* subexprs = expr->values();
   1368   int length = subexprs->length();
   1369 
   1370   // Emit code to evaluate all the non-constant subexpressions and to store
   1371   // them into the newly cloned array.
   1372   for (int array_index = 0; array_index < length; array_index++) {
   1373     Expression* subexpr = subexprs->at(array_index);
   1374     DCHECK(!subexpr->IsSpread());
   1375 
   1376     // If the subexpression is a literal or a simple materialized literal it
   1377     // is already set in the cloned array.
   1378     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
   1379 
   1380     if (!result_saved) {
   1381       PushOperand(v0);  // array literal
   1382       result_saved = true;
   1383     }
   1384 
   1385     VisitForAccumulatorValue(subexpr);
   1386 
   1387     __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
   1388     __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
   1389     __ mov(StoreDescriptor::ValueRegister(), result_register());
   1390     CallKeyedStoreIC(expr->LiteralFeedbackSlot());
   1391 
   1392     PrepareForBailoutForId(expr->GetIdForElement(array_index),
   1393                            BailoutState::NO_REGISTERS);
   1394   }
   1395 
   1396   if (result_saved) {
   1397     context()->PlugTOS();
   1398   } else {
   1399     context()->Plug(v0);
   1400   }
   1401 }
   1402 
   1403 
   1404 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
   1405   DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
   1406 
   1407   Comment cmnt(masm_, "[ Assignment");
   1408 
   1409   Property* property = expr->target()->AsProperty();
   1410   LhsKind assign_type = Property::GetAssignType(property);
   1411 
   1412   // Evaluate LHS expression.
   1413   switch (assign_type) {
   1414     case VARIABLE:
   1415       // Nothing to do here.
   1416       break;
   1417     case NAMED_PROPERTY:
   1418       if (expr->is_compound()) {
   1419         // We need the receiver both on the stack and in the register.
   1420         VisitForStackValue(property->obj());
   1421         __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
   1422       } else {
   1423         VisitForStackValue(property->obj());
   1424       }
   1425       break;
   1426     case KEYED_PROPERTY:
   1427       // We need the key and receiver on both the stack and in v0 and a1.
   1428       if (expr->is_compound()) {
   1429         VisitForStackValue(property->obj());
   1430         VisitForStackValue(property->key());
   1431         __ lw(LoadDescriptor::ReceiverRegister(),
   1432               MemOperand(sp, 1 * kPointerSize));
   1433         __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
   1434       } else {
   1435         VisitForStackValue(property->obj());
   1436         VisitForStackValue(property->key());
   1437       }
   1438       break;
   1439     case NAMED_SUPER_PROPERTY:
   1440     case KEYED_SUPER_PROPERTY:
   1441       UNREACHABLE();
   1442       break;
   1443   }
   1444 
   1445   // For compound assignments we need another deoptimization point after the
   1446   // variable/property load.
   1447   if (expr->is_compound()) {
   1448     { AccumulatorValueContext context(this);
   1449       switch (assign_type) {
   1450         case VARIABLE:
   1451           EmitVariableLoad(expr->target()->AsVariableProxy());
   1452           PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
   1453           break;
   1454         case NAMED_PROPERTY:
   1455           EmitNamedPropertyLoad(property);
   1456           PrepareForBailoutForId(property->LoadId(),
   1457                                  BailoutState::TOS_REGISTER);
   1458           break;
   1459         case KEYED_PROPERTY:
   1460           EmitKeyedPropertyLoad(property);
   1461           PrepareForBailoutForId(property->LoadId(),
   1462                                  BailoutState::TOS_REGISTER);
   1463           break;
   1464         case NAMED_SUPER_PROPERTY:
   1465         case KEYED_SUPER_PROPERTY:
   1466           UNREACHABLE();
   1467           break;
   1468       }
   1469     }
   1470 
   1471     Token::Value op = expr->binary_op();
   1472     PushOperand(v0);  // Left operand goes on the stack.
   1473     VisitForAccumulatorValue(expr->value());
   1474 
   1475     AccumulatorValueContext context(this);
   1476     if (ShouldInlineSmiCase(op)) {
   1477       EmitInlineSmiBinaryOp(expr->binary_operation(),
   1478                             op,
   1479                             expr->target(),
   1480                             expr->value());
   1481     } else {
   1482       EmitBinaryOp(expr->binary_operation(), op);
   1483     }
   1484 
   1485     // Deoptimization point in case the binary operation may have side effects.
   1486     PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
   1487   } else {
   1488     VisitForAccumulatorValue(expr->value());
   1489   }
   1490 
   1491   SetExpressionPosition(expr);
   1492 
   1493   // Store the value.
   1494   switch (assign_type) {
   1495     case VARIABLE: {
   1496       VariableProxy* proxy = expr->target()->AsVariableProxy();
   1497       EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
   1498                              proxy->hole_check_mode());
   1499       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   1500       context()->Plug(v0);
   1501       break;
   1502     }
   1503     case NAMED_PROPERTY:
   1504       EmitNamedPropertyAssignment(expr);
   1505       break;
   1506     case KEYED_PROPERTY:
   1507       EmitKeyedPropertyAssignment(expr);
   1508       break;
   1509     case NAMED_SUPER_PROPERTY:
   1510     case KEYED_SUPER_PROPERTY:
   1511       UNREACHABLE();
   1512       break;
   1513   }
   1514 }
   1515 
   1516 
   1517 void FullCodeGenerator::VisitYield(Yield* expr) {
   1518   // Resumable functions are not supported.
   1519   UNREACHABLE();
   1520 }
   1521 
   1522 void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
   1523   OperandStackDepthIncrement(2);
   1524   __ Push(reg1, reg2);
   1525 }
   1526 
   1527 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
   1528                                      Register reg3) {
   1529   OperandStackDepthIncrement(3);
   1530   __ Push(reg1, reg2, reg3);
   1531 }
   1532 
   1533 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
   1534                                      Register reg3, Register reg4) {
   1535   OperandStackDepthIncrement(4);
   1536   __ Push(reg1, reg2, reg3, reg4);
   1537 }
   1538 
   1539 void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
   1540   OperandStackDepthDecrement(2);
   1541   __ Pop(reg1, reg2);
   1542 }
   1543 
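         // In debug builds, verify that the tracked operand stack depth matches
         // the actual distance between fp and sp.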
   1544 void FullCodeGenerator::EmitOperandStackDepthCheck() {
   1545   if (FLAG_debug_code) {
   1546     int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
   1547                         operand_stack_depth_ * kPointerSize;
   1548     __ Subu(v0, fp, sp);
   1549     __ Assert(eq, kUnexpectedStackDepth, v0, Operand(expected_diff));
   1550   }
   1551 }
   1552 
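         // Allocates a JSIteratorResult in new space (falling back to the
         // runtime on failure), storing the value popped from the operand stack
         // and the given constant for the done property.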
   1553 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
   1554   Label allocate, done_allocate;
   1555 
   1556   __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate,
   1557               NO_ALLOCATION_FLAGS);
   1558   __ jmp(&done_allocate);
   1559 
   1560   __ bind(&allocate);
   1561   __ Push(Smi::FromInt(JSIteratorResult::kSize));
   1562   __ CallRuntime(Runtime::kAllocateInNewSpace);
   1563 
   1564   __ bind(&done_allocate);
   1565   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
   1566   PopOperand(a2);
   1567   __ LoadRoot(a3,
   1568               done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
   1569   __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
   1570   __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
   1571   __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
   1572   __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
   1573   __ sw(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
   1574   __ sw(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
   1575   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
   1576 }
   1577 
   1578 
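         // Emits an inline fast path for smi operands, guarded by a
         // JumpPatchSite; non-smi inputs fall back to the generic BinaryOpIC
         // stub call.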
   1579 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
   1580                                               Token::Value op,
   1581                                               Expression* left_expr,
   1582                                               Expression* right_expr) {
   1583   Label done, smi_case, stub_call;
   1584 
   1585   Register scratch1 = a2;
   1586   Register scratch2 = a3;
   1587 
   1588   // Get the arguments.
   1589   Register left = a1;
   1590   Register right = a0;
   1591   PopOperand(left);
   1592   __ mov(a0, result_register());
   1593 
   1594   // Perform combined smi check on both operands.
   1595   __ Or(scratch1, left, Operand(right));
   1596   STATIC_ASSERT(kSmiTag == 0);
   1597   JumpPatchSite patch_site(masm_);
   1598   patch_site.EmitJumpIfSmi(scratch1, &smi_case);
   1599 
   1600   __ bind(&stub_call);
   1601   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
   1602   CallIC(code, expr->BinaryOperationFeedbackId());
   1603   patch_site.EmitPatchInfo();
   1604   __ jmp(&done);
   1605 
   1606   __ bind(&smi_case);
    1607   // Smi case. This code works the same way as the smi-smi case in the
    1608   // type-recording binary operation stub.
   1609   switch (op) {
   1610     case Token::SAR:
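               // Arithmetic shift of the tagged value; the tag bit position may
               // pick up a shifted-in bit, so clear it to keep a valid smi.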
   1611       __ GetLeastBitsFromSmi(scratch1, right, 5);
   1612       __ srav(right, left, scratch1);
   1613       __ And(v0, right, Operand(~kSmiTagMask));
   1614       break;
   1615     case Token::SHL: {
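               // Untag, shift left, then check that the result still fits in a
               // smi (adding 0x40000000 makes out-of-range values negative)
               // before retagging.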
   1616       __ SmiUntag(scratch1, left);
   1617       __ GetLeastBitsFromSmi(scratch2, right, 5);
   1618       __ sllv(scratch1, scratch1, scratch2);
   1619       __ Addu(scratch2, scratch1, Operand(0x40000000));
   1620       __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
   1621       __ SmiTag(v0, scratch1);
   1622       break;
   1623     }
   1624     case Token::SHR: {
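               // Logical shift; the result must be a non-negative smi, so bail
               // out to the stub if either of the two most significant bits is
               // set.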
   1625       __ SmiUntag(scratch1, left);
   1626       __ GetLeastBitsFromSmi(scratch2, right, 5);
   1627       __ srlv(scratch1, scratch1, scratch2);
   1628       __ And(scratch2, scratch1, 0xc0000000);
   1629       __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
   1630       __ SmiTag(v0, scratch1);
   1631       break;
   1632     }
   1633     case Token::ADD:
   1634       __ AddBranchOvf(v0, left, Operand(right), &stub_call);
   1635       break;
   1636     case Token::SUB:
   1637       __ SubBranchOvf(v0, left, Operand(right), &stub_call);
   1638       break;
   1639     case Token::MUL: {
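               // Multiply the tagged left operand by the untagged right operand.
               // The high word must equal the sign extension of the low word
               // (no overflow), and a zero result is only a valid smi when the
               // operand signs rule out -0.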
   1640       __ SmiUntag(scratch1, right);
   1641       __ Mul(scratch2, v0, left, scratch1);
   1642       __ sra(scratch1, v0, 31);
   1643       __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
   1644       __ Branch(&done, ne, v0, Operand(zero_reg));
   1645       __ Addu(scratch2, right, left);
   1646       __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
   1647       DCHECK(Smi::kZero == 0);
   1648       __ mov(v0, zero_reg);
   1649       break;
   1650     }
   1651     case Token::BIT_OR:
   1652       __ Or(v0, left, Operand(right));
   1653       break;
   1654     case Token::BIT_AND:
   1655       __ And(v0, left, Operand(right));
   1656       break;
   1657     case Token::BIT_XOR:
   1658       __ Xor(v0, left, Operand(right));
   1659       break;
   1660     default:
   1661       UNREACHABLE();
   1662   }
   1663 
   1664   __ bind(&done);
   1665   context()->Plug(v0);
   1666 }
   1667 
   1668 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
   1669   __ mov(a0, result_register());
   1670   PopOperand(a1);
   1671   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
   1672   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
   1673   CallIC(code, expr->BinaryOperationFeedbackId());
   1674   patch_site.EmitPatchInfo();
   1675   context()->Plug(v0);
   1676 }
   1677 
   1678 void FullCodeGenerator::EmitAssignment(Expression* expr, FeedbackSlot slot) {
   1679   DCHECK(expr->IsValidReferenceExpressionOrThis());
   1680 
   1681   Property* prop = expr->AsProperty();
   1682   LhsKind assign_type = Property::GetAssignType(prop);
   1683 
   1684   switch (assign_type) {
   1685     case VARIABLE: {
   1686       VariableProxy* proxy = expr->AsVariableProxy();
   1687       EffectContext context(this);
   1688       EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
   1689                              proxy->hole_check_mode());
   1690       break;
   1691     }
   1692     case NAMED_PROPERTY: {
   1693       PushOperand(result_register());  // Preserve value.
   1694       VisitForAccumulatorValue(prop->obj());
   1695       __ mov(StoreDescriptor::ReceiverRegister(), result_register());
   1696       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
   1697       CallStoreIC(slot, prop->key()->AsLiteral()->value());
   1698       break;
   1699     }
   1700     case KEYED_PROPERTY: {
   1701       PushOperand(result_register());  // Preserve value.
   1702       VisitForStackValue(prop->obj());
   1703       VisitForAccumulatorValue(prop->key());
   1704       __ mov(StoreDescriptor::NameRegister(), result_register());
   1705       PopOperands(StoreDescriptor::ValueRegister(),
   1706                   StoreDescriptor::ReceiverRegister());
   1707       CallKeyedStoreIC(slot);
   1708       break;
   1709     }
   1710     case NAMED_SUPER_PROPERTY:
   1711     case KEYED_SUPER_PROPERTY:
   1712       UNREACHABLE();
   1713       break;
   1714   }
   1715   context()->Plug(v0);
   1716 }
   1717 
   1718 
   1719 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
   1720     Variable* var, MemOperand location) {
   1721   __ sw(result_register(), location);
   1722   if (var->IsContextSlot()) {
   1723     // RecordWrite may destroy all its register arguments.
   1724     __ Move(a3, result_register());
   1725     int offset = Context::SlotOffset(var->index());
   1726     __ RecordWriteContextSlot(
   1727         a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
   1728   }
   1729 }
   1730 
   1731 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
   1732                                                FeedbackSlot slot,
   1733                                                HoleCheckMode hole_check_mode) {
   1734   if (var->IsUnallocated()) {
   1735     // Global var, const, or let.
   1736     __ mov(StoreDescriptor::ValueRegister(), result_register());
   1737     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
   1738     CallStoreIC(slot, var->name());
   1739 
   1740   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
   1741     DCHECK(!var->IsLookupSlot());
   1742     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
   1743     MemOperand location = VarOperand(var, a1);
   1744     // Perform an initialization check for lexically declared variables.
   1745     if (hole_check_mode == HoleCheckMode::kRequired) {
   1746       Label assign;
   1747       __ lw(a3, location);
   1748       __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
   1749       __ Branch(&assign, ne, a3, Operand(t0));
   1750       __ li(a3, Operand(var->name()));
   1751       __ push(a3);
   1752       __ CallRuntime(Runtime::kThrowReferenceError);
   1753       __ bind(&assign);
   1754     }
   1755     if (var->mode() != CONST) {
   1756       EmitStoreToStackLocalOrContextSlot(var, location);
   1757     } else if (var->throw_on_const_assignment(language_mode())) {
   1758       __ CallRuntime(Runtime::kThrowConstAssignError);
   1759     }
   1760   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
   1761     // Initializing assignment to const {this} needs a write barrier.
   1762     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
   1763     Label uninitialized_this;
   1764     MemOperand location = VarOperand(var, a1);
   1765     __ lw(a3, location);
   1766     __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
   1767     __ Branch(&uninitialized_this, eq, a3, Operand(at));
   1768     __ li(a0, Operand(var->name()));
   1769     __ Push(a0);
   1770     __ CallRuntime(Runtime::kThrowReferenceError);
   1771     __ bind(&uninitialized_this);
   1772     EmitStoreToStackLocalOrContextSlot(var, location);
   1773 
   1774   } else {
   1775     DCHECK(var->mode() != CONST || op == Token::INIT);
   1776     DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
   1777     DCHECK(!var->IsLookupSlot());
   1778     // Assignment to var or initializing assignment to let/const in harmony
   1779     // mode.
   1780     MemOperand location = VarOperand(var, a1);
   1781     if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
   1782       // Check for an uninitialized let binding.
   1783       __ lw(a2, location);
   1784       __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
   1785       __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
   1786     }
   1787     EmitStoreToStackLocalOrContextSlot(var, location);
   1788   }
   1789 }
   1790 
   1791 
   1792 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
   1793   // Assignment to a property, using a named store IC.
   1794   Property* prop = expr->target()->AsProperty();
   1795   DCHECK(prop != NULL);
   1796   DCHECK(prop->key()->IsLiteral());
   1797 
   1798   __ mov(StoreDescriptor::ValueRegister(), result_register());
   1799   PopOperand(StoreDescriptor::ReceiverRegister());
   1800   CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
   1801 
   1802   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   1803   context()->Plug(v0);
   1804 }
   1805 
   1806 
   1807 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   1808   // Assignment to a property, using a keyed store IC.
   1809   // Call keyed store IC.
   1810   // The arguments are:
   1811   // - a0 is the value,
   1812   // - a1 is the key,
   1813   // - a2 is the receiver.
   1814   __ mov(StoreDescriptor::ValueRegister(), result_register());
   1815   PopOperands(StoreDescriptor::ReceiverRegister(),
   1816               StoreDescriptor::NameRegister());
   1817   DCHECK(StoreDescriptor::ValueRegister().is(a0));
   1818 
   1819   CallKeyedStoreIC(expr->AssignmentSlot());
   1820 
   1821   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   1822   context()->Plug(v0);
   1823 }
   1824 
   1825 // Code common for calls using the IC.
   1826 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
   1827   Expression* callee = expr->expression();
   1828 
   1829   // Get the target function.
   1830   ConvertReceiverMode convert_mode;
   1831   if (callee->IsVariableProxy()) {
   1832     { StackValueContext context(this);
   1833       EmitVariableLoad(callee->AsVariableProxy());
   1834       PrepareForBailout(callee, BailoutState::NO_REGISTERS);
   1835     }
   1836     // Push undefined as receiver. This is patched in the method prologue if it
   1837     // is a sloppy mode method.
   1838     __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
   1839     PushOperand(at);
   1840     convert_mode = ConvertReceiverMode::kNullOrUndefined;
   1841   } else {
   1842     // Load the function from the receiver.
   1843     DCHECK(callee->IsProperty());
   1844     DCHECK(!callee->AsProperty()->IsSuperAccess());
   1845     __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
   1846     EmitNamedPropertyLoad(callee->AsProperty());
   1847     PrepareForBailoutForId(callee->AsProperty()->LoadId(),
   1848                            BailoutState::TOS_REGISTER);
   1849     // Push the target function under the receiver.
   1850     __ lw(at, MemOperand(sp, 0));
   1851     PushOperand(at);
   1852     __ sw(v0, MemOperand(sp, kPointerSize));
   1853     convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
   1854   }
   1855 
   1856   EmitCall(expr, convert_mode);
   1857 }
   1858 
   1859 
   1860 // Code common for calls using the IC.
   1861 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
   1862                                                 Expression* key) {
   1863   // Load the key.
   1864   VisitForAccumulatorValue(key);
   1865 
   1866   Expression* callee = expr->expression();
   1867 
   1868   // Load the function from the receiver.
   1869   DCHECK(callee->IsProperty());
   1870   __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
   1871   __ Move(LoadDescriptor::NameRegister(), v0);
   1872   EmitKeyedPropertyLoad(callee->AsProperty());
   1873   PrepareForBailoutForId(callee->AsProperty()->LoadId(),
   1874                          BailoutState::TOS_REGISTER);
   1875 
   1876   // Push the target function under the receiver.
   1877   __ lw(at, MemOperand(sp, 0));
   1878   PushOperand(at);
   1879   __ sw(v0, MemOperand(sp, kPointerSize));
   1880 
   1881   EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
   1882 }
   1883 
   1884 
   1885 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
   1886   // Load the arguments.
   1887   ZoneList<Expression*>* args = expr->arguments();
   1888   int arg_count = args->length();
   1889   for (int i = 0; i < arg_count; i++) {
   1890     VisitForStackValue(args->at(i));
   1891   }
   1892 
   1893   PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
   1894   // Record source position of the IC call.
   1895   SetCallPosition(expr, expr->tail_call_mode());
   1896   if (expr->tail_call_mode() == TailCallMode::kAllow) {
   1897     if (FLAG_trace) {
   1898       __ CallRuntime(Runtime::kTraceTailCall);
   1899     }
   1900     // Update profiling counters before the tail call since we will
   1901     // not return to this function.
   1902     EmitProfilingCounterHandlingForReturnSequence(true);
   1903   }
   1904   Handle<Code> code =
   1905       CodeFactory::CallICTrampoline(isolate(), mode, expr->tail_call_mode())
   1906           .code();
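           // The CallIC trampoline expects the feedback slot index in a3, the
           // callee loaded from the stack in a1 and the argument count in a0.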
   1907   __ li(a3, Operand(IntFromSlot(expr->CallFeedbackICSlot())));
   1908   __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
   1909   __ li(a0, Operand(arg_count));
   1910   CallIC(code);
   1911   OperandStackDepthDecrement(arg_count + 1);
   1912 
   1913   RecordJSReturnSite(expr);
   1914   RestoreContext();
   1915   context()->DropAndPlug(1, v0);
   1916 }
   1917 
   1918 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   1919   Comment cmnt(masm_, "[ CallNew");
   1920   // According to ECMA-262, section 11.2.2, page 44, the function
   1921   // expression in new calls must be evaluated before the
   1922   // arguments.
   1923 
   1924   // Push constructor on the stack.  If it's not a function it's used as
   1925   // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
    1926   // ignored.
   1927   DCHECK(!expr->expression()->IsSuperPropertyReference());
   1928   VisitForStackValue(expr->expression());
   1929 
   1930   // Push the arguments ("left-to-right") on the stack.
   1931   ZoneList<Expression*>* args = expr->arguments();
   1932   int arg_count = args->length();
   1933   for (int i = 0; i < arg_count; i++) {
   1934     VisitForStackValue(args->at(i));
   1935   }
   1936 
   1937   // Call the construct call builtin that handles allocation and
   1938   // constructor invocation.
   1939   SetConstructCallPosition(expr);
   1940 
   1941   // Load function and argument count into a1 and a0.
   1942   __ li(a0, Operand(arg_count));
   1943   __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
   1944 
   1945   // Record call targets in unoptimized code.
   1946   __ EmitLoadFeedbackVector(a2);
   1947   __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
   1948 
   1949   CallConstructStub stub(isolate());
   1950   CallIC(stub.GetCode());
   1951   OperandStackDepthDecrement(arg_count + 1);
   1952   PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
   1953   RestoreContext();
   1954   context()->Plug(v0);
   1955 }
   1956 
   1957 
   1958 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
   1959   ZoneList<Expression*>* args = expr->arguments();
   1960   DCHECK(args->length() == 1);
   1961 
   1962   VisitForAccumulatorValue(args->at(0));
   1963 
   1964   Label materialize_true, materialize_false;
   1965   Label* if_true = NULL;
   1966   Label* if_false = NULL;
   1967   Label* fall_through = NULL;
   1968   context()->PrepareTest(&materialize_true, &materialize_false,
   1969                          &if_true, &if_false, &fall_through);
   1970 
   1971   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   1972   __ SmiTst(v0, t0);
   1973   Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);
   1974 
   1975   context()->Plug(if_true, if_false);
   1976 }
   1977 
   1978 
   1979 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
   1980   ZoneList<Expression*>* args = expr->arguments();
   1981   DCHECK(args->length() == 1);
   1982 
   1983   VisitForAccumulatorValue(args->at(0));
   1984 
   1985   Label materialize_true, materialize_false;
   1986   Label* if_true = NULL;
   1987   Label* if_false = NULL;
   1988   Label* fall_through = NULL;
   1989   context()->PrepareTest(&materialize_true, &materialize_false,
   1990                          &if_true, &if_false, &fall_through);
   1991 
   1992   __ JumpIfSmi(v0, if_false);
   1993   __ GetObjectType(v0, a1, a1);
   1994   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   1995   Split(ge, a1, Operand(FIRST_JS_RECEIVER_TYPE),
   1996         if_true, if_false, fall_through);
   1997 
   1998   context()->Plug(if_true, if_false);
   1999 }
   2000 
   2001 
   2002 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
   2003   ZoneList<Expression*>* args = expr->arguments();
   2004   DCHECK(args->length() == 1);
   2005 
   2006   VisitForAccumulatorValue(args->at(0));
   2007 
   2008   Label materialize_true, materialize_false;
   2009   Label* if_true = NULL;
   2010   Label* if_false = NULL;
   2011   Label* fall_through = NULL;
   2012   context()->PrepareTest(&materialize_true, &materialize_false,
   2013                          &if_true, &if_false, &fall_through);
   2014 
   2015   __ JumpIfSmi(v0, if_false);
   2016   __ GetObjectType(v0, a1, a1);
   2017   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2018   Split(eq, a1, Operand(JS_ARRAY_TYPE),
   2019         if_true, if_false, fall_through);
   2020 
   2021   context()->Plug(if_true, if_false);
   2022 }
   2023 
   2024 
   2025 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
   2026   ZoneList<Expression*>* args = expr->arguments();
   2027   DCHECK(args->length() == 1);
   2028 
   2029   VisitForAccumulatorValue(args->at(0));
   2030 
   2031   Label materialize_true, materialize_false;
   2032   Label* if_true = NULL;
   2033   Label* if_false = NULL;
   2034   Label* fall_through = NULL;
   2035   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
   2036                          &if_false, &fall_through);
   2037 
   2038   __ JumpIfSmi(v0, if_false);
   2039   __ GetObjectType(v0, a1, a1);
   2040   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2041   Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);
   2042 
   2043   context()->Plug(if_true, if_false);
   2044 }
   2045 
   2046 
   2047 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
   2048   ZoneList<Expression*>* args = expr->arguments();
   2049   DCHECK(args->length() == 1);
   2050 
   2051   VisitForAccumulatorValue(args->at(0));
   2052 
   2053   Label materialize_true, materialize_false;
   2054   Label* if_true = NULL;
   2055   Label* if_false = NULL;
   2056   Label* fall_through = NULL;
   2057   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
   2058                          &if_false, &fall_through);
   2059 
   2060   __ JumpIfSmi(v0, if_false);
   2061   __ GetObjectType(v0, a1, a1);
   2062   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2063   Split(eq, a1, Operand(JS_PROXY_TYPE), if_true, if_false, fall_through);
   2064 
   2065   context()->Plug(if_true, if_false);
   2066 }
   2067 
   2068 
   2069 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
   2070   ZoneList<Expression*>* args = expr->arguments();
   2071   DCHECK(args->length() == 1);
   2072   Label done, null, function, non_function_constructor;
   2073 
   2074   VisitForAccumulatorValue(args->at(0));
   2075 
   2076   // If the object is not a JSReceiver, we return null.
   2077   __ JumpIfSmi(v0, &null);
   2078   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
   2079   __ GetObjectType(v0, v0, a1);  // Map is now in v0.
   2080   __ Branch(&null, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
   2081 
   2082   // Return 'Function' for JSFunction and JSBoundFunction objects.
   2083   STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
   2084   __ Branch(&function, hs, a1, Operand(FIRST_FUNCTION_TYPE));
   2085 
   2086   // Check if the constructor in the map is a JS function.
   2087   Register instance_type = a2;
   2088   __ GetMapConstructor(v0, v0, a1, instance_type);
   2089   __ Branch(&non_function_constructor, ne, instance_type,
   2090             Operand(JS_FUNCTION_TYPE));
   2091 
   2092   // v0 now contains the constructor function. Grab the
   2093   // instance class name from there.
   2094   __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
   2095   __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
   2096   __ Branch(&done);
   2097 
   2098   // Functions have class 'Function'.
   2099   __ bind(&function);
   2100   __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
   2101   __ jmp(&done);
   2102 
   2103   // Objects with a non-function constructor have class 'Object'.
   2104   __ bind(&non_function_constructor);
   2105   __ LoadRoot(v0, Heap::kObject_stringRootIndex);
   2106   __ jmp(&done);
   2107 
   2108   // Non-JS objects have class null.
   2109   __ bind(&null);
   2110   __ LoadRoot(v0, Heap::kNullValueRootIndex);
   2111 
   2112   // All done.
   2113   __ bind(&done);
   2114 
   2115   context()->Plug(v0);
   2116 }
   2117 
   2118 
   2119 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   2120   ZoneList<Expression*>* args = expr->arguments();
   2121   DCHECK(args->length() == 2);
   2122 
   2123   VisitForStackValue(args->at(0));
   2124   VisitForAccumulatorValue(args->at(1));
   2125   __ mov(a0, result_register());
   2126 
   2127   Register object = a1;
   2128   Register index = a0;
   2129   Register result = v0;
   2130 
   2131   PopOperand(object);
   2132 
   2133   Label need_conversion;
   2134   Label index_out_of_range;
   2135   Label done;
   2136   StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
   2137                                       &need_conversion, &index_out_of_range);
   2138   generator.GenerateFast(masm_);
   2139   __ jmp(&done);
   2140 
   2141   __ bind(&index_out_of_range);
   2142   // When the index is out of range, the spec requires us to return
   2143   // NaN.
   2144   __ LoadRoot(result, Heap::kNanValueRootIndex);
   2145   __ jmp(&done);
   2146 
   2147   __ bind(&need_conversion);
   2148   // Load the undefined value into the result register, which will
   2149   // trigger conversion.
   2150   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
   2151   __ jmp(&done);
   2152 
   2153   NopRuntimeCallHelper call_helper;
   2154   generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
   2155 
   2156   __ bind(&done);
   2157   context()->Plug(result);
   2158 }
   2159 
   2160 
   2161 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
   2162   ZoneList<Expression*>* args = expr->arguments();
   2163   DCHECK_LE(2, args->length());
   2164   // Push target, receiver and arguments onto the stack.
   2165   for (Expression* const arg : *args) {
   2166     VisitForStackValue(arg);
   2167   }
   2168   PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
   2169   // Move target to a1.
   2170   int const argc = args->length() - 2;
   2171   __ lw(a1, MemOperand(sp, (argc + 1) * kPointerSize));
   2172   // Call the target.
   2173   __ li(a0, Operand(argc));
   2174   __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   2175   OperandStackDepthDecrement(argc + 1);
   2176   RestoreContext();
   2177   // Discard the function left on TOS.
   2178   context()->DropAndPlug(1, v0);
   2179 }
   2180 
   2181 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
   2182   ZoneList<Expression*>* args = expr->arguments();
   2183   DCHECK_EQ(1, args->length());
   2184   VisitForAccumulatorValue(args->at(0));
   2185   __ AssertFunction(v0);
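           // The super constructor is the [[Prototype]] of the current function,
           // read via its map.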
   2186   __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
   2187   __ lw(v0, FieldMemOperand(v0, Map::kPrototypeOffset));
   2188   context()->Plug(v0);
   2189 }
   2190 
   2191 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
   2192   DCHECK(expr->arguments()->length() == 0);
   2193   ExternalReference debug_is_active =
   2194       ExternalReference::debug_is_active_address(isolate());
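           // Load the byte-sized flag from the external reference and return it
           // as a smi.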
   2195   __ li(at, Operand(debug_is_active));
   2196   __ lb(v0, MemOperand(at));
   2197   __ SmiTag(v0);
   2198   context()->Plug(v0);
   2199 }
   2200 
   2201 
   2202 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
   2203   ZoneList<Expression*>* args = expr->arguments();
   2204   DCHECK_EQ(2, args->length());
   2205   VisitForStackValue(args->at(0));
   2206   VisitForStackValue(args->at(1));
   2207 
   2208   Label runtime, done;
   2209 
   2210   __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime,
   2211               NO_ALLOCATION_FLAGS);
   2212   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
   2213   __ Pop(a2, a3);
   2214   __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
   2215   __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
   2216   __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
   2217   __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
   2218   __ sw(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
   2219   __ sw(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
   2220   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
   2221   __ jmp(&done);
   2222 
   2223   __ bind(&runtime);
   2224   CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
   2225 
   2226   __ bind(&done);
   2227   context()->Plug(v0);
   2228 }
   2229 
   2230 
   2231 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
   2232   // Push function.
   2233   __ LoadNativeContextSlot(expr->context_index(), v0);
   2234   PushOperand(v0);
   2235 
   2236   // Push undefined as the receiver.
   2237   __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
   2238   PushOperand(v0);
   2239 }
   2240 
   2241 
   2242 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
   2243   ZoneList<Expression*>* args = expr->arguments();
   2244   int arg_count = args->length();
   2245 
   2246   SetCallPosition(expr);
   2247   __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
   2248   __ li(a0, Operand(arg_count));
   2249   __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
   2250           RelocInfo::CODE_TARGET);
   2251   OperandStackDepthDecrement(arg_count + 1);
   2252   RestoreContext();
   2253 }
   2254 
   2255 
   2256 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
   2257   switch (expr->op()) {
   2258     case Token::DELETE: {
   2259       Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
   2260       Property* property = expr->expression()->AsProperty();
   2261       VariableProxy* proxy = expr->expression()->AsVariableProxy();
   2262 
   2263       if (property != NULL) {
   2264         VisitForStackValue(property->obj());
   2265         VisitForStackValue(property->key());
   2266         CallRuntimeWithOperands(is_strict(language_mode())
   2267                                     ? Runtime::kDeleteProperty_Strict
   2268                                     : Runtime::kDeleteProperty_Sloppy);
   2269         context()->Plug(v0);
   2270       } else if (proxy != NULL) {
   2271         Variable* var = proxy->var();
   2272         // Delete of an unqualified identifier is disallowed in strict mode but
   2273         // "delete this" is allowed.
   2274         bool is_this = var->is_this();
   2275         DCHECK(is_sloppy(language_mode()) || is_this);
   2276         if (var->IsUnallocated()) {
   2277           __ LoadGlobalObject(a2);
   2278           __ li(a1, Operand(var->name()));
   2279           __ Push(a2, a1);
   2280           __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
   2281           context()->Plug(v0);
   2282         } else {
   2283           DCHECK(!var->IsLookupSlot());
   2284           DCHECK(var->IsStackAllocated() || var->IsContextSlot());
   2285           // Result of deleting non-global, non-dynamic variables is false.
   2286           // The subexpression does not have side effects.
   2287           context()->Plug(is_this);
   2288         }
   2289       } else {
   2290         // Result of deleting non-property, non-variable reference is true.
   2291         // The subexpression may have side effects.
   2292         VisitForEffect(expr->expression());
   2293         context()->Plug(true);
   2294       }
   2295       break;
   2296     }
   2297 
   2298     case Token::VOID: {
   2299       Comment cmnt(masm_, "[ UnaryOperation (VOID)");
   2300       VisitForEffect(expr->expression());
   2301       context()->Plug(Heap::kUndefinedValueRootIndex);
   2302       break;
   2303     }
   2304 
   2305     case Token::NOT: {
   2306       Comment cmnt(masm_, "[ UnaryOperation (NOT)");
   2307       if (context()->IsEffect()) {
   2308         // Unary NOT has no side effects so it's only necessary to visit the
   2309         // subexpression.  Match the optimizing compiler by not branching.
   2310         VisitForEffect(expr->expression());
   2311       } else if (context()->IsTest()) {
   2312         const TestContext* test = TestContext::cast(context());
   2313         // The labels are swapped for the recursive call.
   2314         VisitForControl(expr->expression(),
   2315                         test->false_label(),
   2316                         test->true_label(),
   2317                         test->fall_through());
   2318         context()->Plug(test->true_label(), test->false_label());
   2319       } else {
   2320         // We handle value contexts explicitly rather than simply visiting
   2321         // for control and plugging the control flow into the context,
   2322         // because we need to prepare a pair of extra administrative AST ids
   2323         // for the optimizing compiler.
   2324         DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
   2325         Label materialize_true, materialize_false, done;
   2326         VisitForControl(expr->expression(),
   2327                         &materialize_false,
   2328                         &materialize_true,
   2329                         &materialize_true);
   2330         if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
   2331         __ bind(&materialize_true);
   2332         PrepareForBailoutForId(expr->MaterializeTrueId(),
   2333                                BailoutState::NO_REGISTERS);
   2334         __ LoadRoot(v0, Heap::kTrueValueRootIndex);
   2335         if (context()->IsStackValue()) __ push(v0);
   2336         __ jmp(&done);
   2337         __ bind(&materialize_false);
   2338         PrepareForBailoutForId(expr->MaterializeFalseId(),
   2339                                BailoutState::NO_REGISTERS);
   2340         __ LoadRoot(v0, Heap::kFalseValueRootIndex);
   2341         if (context()->IsStackValue()) __ push(v0);
   2342         __ bind(&done);
   2343       }
   2344       break;
   2345     }
   2346 
   2347     case Token::TYPEOF: {
   2348       Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
   2349       {
   2350         AccumulatorValueContext context(this);
   2351         VisitForTypeofValue(expr->expression());
   2352       }
   2353       __ mov(a3, v0);
   2354       __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
   2355       context()->Plug(v0);
   2356       break;
   2357     }
   2358 
   2359     default:
   2360       UNREACHABLE();
   2361   }
   2362 }
   2363 
   2364 
   2365 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
   2366   DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
   2367 
   2368   Comment cmnt(masm_, "[ CountOperation");
   2369 
   2370   Property* prop = expr->expression()->AsProperty();
   2371   LhsKind assign_type = Property::GetAssignType(prop);
   2372 
   2373   // Evaluate expression and get value.
   2374   if (assign_type == VARIABLE) {
   2375     DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
   2376     AccumulatorValueContext context(this);
   2377     EmitVariableLoad(expr->expression()->AsVariableProxy());
   2378   } else {
   2379     // Reserve space for result of postfix operation.
   2380     if (expr->is_postfix() && !context()->IsEffect()) {
   2381       __ li(at, Operand(Smi::kZero));
   2382       PushOperand(at);
   2383     }
   2384     switch (assign_type) {
   2385       case NAMED_PROPERTY: {
   2386         // Put the object both on the stack and in the register.
   2387         VisitForStackValue(prop->obj());
   2388         __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
   2389         EmitNamedPropertyLoad(prop);
   2390         break;
   2391       }
   2392 
   2393       case KEYED_PROPERTY: {
   2394         VisitForStackValue(prop->obj());
   2395         VisitForStackValue(prop->key());
   2396         __ lw(LoadDescriptor::ReceiverRegister(),
   2397               MemOperand(sp, 1 * kPointerSize));
   2398         __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
   2399         EmitKeyedPropertyLoad(prop);
   2400         break;
   2401       }
   2402 
   2403       case NAMED_SUPER_PROPERTY:
   2404       case KEYED_SUPER_PROPERTY:
   2405       case VARIABLE:
   2406         UNREACHABLE();
   2407     }
   2408   }
   2409 
   2410   // We need a second deoptimization point after loading the value
    2411   // in case evaluating the property load may have a side effect.
   2412   if (assign_type == VARIABLE) {
   2413     PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
   2414   } else {
   2415     PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
   2416   }
   2417 
   2418   // Inline smi case if we are in a loop.
   2419   Label stub_call, done;
   2420   JumpPatchSite patch_site(masm_);
   2421 
   2422   int count_value = expr->op() == Token::INC ? 1 : -1;
   2423   __ mov(a0, v0);
   2424   if (ShouldInlineSmiCase(expr->op())) {
   2425     Label slow;
   2426     patch_site.EmitJumpIfNotSmi(v0, &slow);
   2427 
   2428     // Save result for postfix expressions.
   2429     if (expr->is_postfix()) {
   2430       if (!context()->IsEffect()) {
   2431         // Save the result on the stack. If we have a named or keyed property
   2432         // we store the result under the receiver that is currently on top
   2433         // of the stack.
   2434         switch (assign_type) {
   2435           case VARIABLE:
   2436             __ push(v0);
   2437             break;
   2438           case NAMED_PROPERTY:
   2439             __ sw(v0, MemOperand(sp, kPointerSize));
   2440             break;
   2441           case KEYED_PROPERTY:
   2442             __ sw(v0, MemOperand(sp, 2 * kPointerSize));
   2443             break;
   2444           case NAMED_SUPER_PROPERTY:
   2445           case KEYED_SUPER_PROPERTY:
   2446             UNREACHABLE();
   2447             break;
   2448         }
   2449       }
   2450     }
   2451 
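             // Add the increment inline; if the addition does not overflow we
             // are done, otherwise restore the old value and call the stub.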
   2452     Register scratch1 = a1;
   2453     __ li(scratch1, Operand(Smi::FromInt(count_value)));
   2454     __ AddBranchNoOvf(v0, v0, Operand(scratch1), &done);
   2455     // Call stub. Undo operation first.
   2456     __ Move(v0, a0);
   2457     __ jmp(&stub_call);
   2458     __ bind(&slow);
   2459   }
   2460 
   2461   // Convert old value into a number.
   2462   __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
   2463   RestoreContext();
   2464   PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
   2465 
   2466   // Save result for postfix expressions.
   2467   if (expr->is_postfix()) {
   2468     if (!context()->IsEffect()) {
   2469       // Save the result on the stack. If we have a named or keyed property
   2470       // we store the result under the receiver that is currently on top
   2471       // of the stack.
   2472       switch (assign_type) {
   2473         case VARIABLE:
   2474           PushOperand(v0);
   2475           break;
   2476         case NAMED_PROPERTY:
   2477           __ sw(v0, MemOperand(sp, kPointerSize));
   2478           break;
   2479         case KEYED_PROPERTY:
   2480           __ sw(v0, MemOperand(sp, 2 * kPointerSize));
   2481           break;
   2482         case NAMED_SUPER_PROPERTY:
   2483         case KEYED_SUPER_PROPERTY:
   2484           UNREACHABLE();
   2485           break;
   2486       }
   2487     }
   2488   }
   2489 
   2490   __ bind(&stub_call);
   2491   __ mov(a1, v0);
   2492   __ li(a0, Operand(Smi::FromInt(count_value)));
   2493 
   2494   SetExpressionPosition(expr);
   2495 
   2496   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
   2497   CallIC(code, expr->CountBinOpFeedbackId());
   2498   patch_site.EmitPatchInfo();
   2499   __ bind(&done);
   2500 
   2501   // Store the value returned in v0.
   2502   switch (assign_type) {
   2503     case VARIABLE: {
   2504       VariableProxy* proxy = expr->expression()->AsVariableProxy();
   2505       if (expr->is_postfix()) {
   2506         { EffectContext context(this);
   2507           EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
   2508                                  proxy->hole_check_mode());
   2509           PrepareForBailoutForId(expr->AssignmentId(),
   2510                                  BailoutState::TOS_REGISTER);
   2511           context.Plug(v0);
   2512         }
    2513         // For all contexts except EffectContext we have the result on
    2514         // top of the stack.
   2515         if (!context()->IsEffect()) {
   2516           context()->PlugTOS();
   2517         }
   2518       } else {
   2519         EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
   2520                                proxy->hole_check_mode());
   2521         PrepareForBailoutForId(expr->AssignmentId(),
   2522                                BailoutState::TOS_REGISTER);
   2523         context()->Plug(v0);
   2524       }
   2525       break;
   2526     }
   2527     case NAMED_PROPERTY: {
   2528       __ mov(StoreDescriptor::ValueRegister(), result_register());
   2529       PopOperand(StoreDescriptor::ReceiverRegister());
   2530       CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
   2531       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   2532       if (expr->is_postfix()) {
   2533         if (!context()->IsEffect()) {
   2534           context()->PlugTOS();
   2535         }
   2536       } else {
   2537         context()->Plug(v0);
   2538       }
   2539       break;
   2540     }
   2541     case KEYED_PROPERTY: {
   2542       __ mov(StoreDescriptor::ValueRegister(), result_register());
   2543       PopOperands(StoreDescriptor::ReceiverRegister(),
   2544                   StoreDescriptor::NameRegister());
   2545       CallKeyedStoreIC(expr->CountSlot());
   2546       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   2547       if (expr->is_postfix()) {
   2548         if (!context()->IsEffect()) {
   2549           context()->PlugTOS();
   2550         }
   2551       } else {
   2552         context()->Plug(v0);
   2553       }
   2554       break;
   2555     }
   2556     case NAMED_SUPER_PROPERTY:
   2557     case KEYED_SUPER_PROPERTY:
   2558       UNREACHABLE();
   2559       break;
   2560   }
   2561 }
   2562 
   2563 
   2564 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
   2565                                                  Expression* sub_expr,
   2566                                                  Handle<String> check) {
   2567   Label materialize_true, materialize_false;
   2568   Label* if_true = NULL;
   2569   Label* if_false = NULL;
   2570   Label* fall_through = NULL;
   2571   context()->PrepareTest(&materialize_true, &materialize_false,
   2572                          &if_true, &if_false, &fall_through);
   2573 
   2574   { AccumulatorValueContext context(this);
   2575     VisitForTypeofValue(sub_expr);
   2576   }
   2577   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
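           // Compare the typeof result against each supported string literal and
           // emit the matching inline type check; unknown strings always branch
           // to the false label.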
   2578 
   2579   Factory* factory = isolate()->factory();
   2580   if (String::Equals(check, factory->number_string())) {
   2581     __ JumpIfSmi(v0, if_true);
   2582     __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
   2583     __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
   2584     Split(eq, v0, Operand(at), if_true, if_false, fall_through);
   2585   } else if (String::Equals(check, factory->string_string())) {
   2586     __ JumpIfSmi(v0, if_false);
   2587     __ GetObjectType(v0, v0, a1);
   2588     Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
   2589           fall_through);
   2590   } else if (String::Equals(check, factory->symbol_string())) {
   2591     __ JumpIfSmi(v0, if_false);
   2592     __ GetObjectType(v0, v0, a1);
   2593     Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
   2594   } else if (String::Equals(check, factory->boolean_string())) {
   2595     __ LoadRoot(at, Heap::kTrueValueRootIndex);
   2596     __ Branch(if_true, eq, v0, Operand(at));
   2597     __ LoadRoot(at, Heap::kFalseValueRootIndex);
   2598     Split(eq, v0, Operand(at), if_true, if_false, fall_through);
   2599   } else if (String::Equals(check, factory->undefined_string())) {
   2600     __ LoadRoot(at, Heap::kNullValueRootIndex);
   2601     __ Branch(if_false, eq, v0, Operand(at));
   2602     __ JumpIfSmi(v0, if_false);
   2603     // Check for undetectable objects => true.
   2604     __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
   2605     __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
   2606     __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
   2607     Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
   2608   } else if (String::Equals(check, factory->function_string())) {
   2609     __ JumpIfSmi(v0, if_false);
   2610     __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
   2611     __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
   2612     __ And(a1, a1,
   2613            Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
   2614     Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
   2615           fall_through);
   2616   } else if (String::Equals(check, factory->object_string())) {
   2617     __ JumpIfSmi(v0, if_false);
   2618     __ LoadRoot(at, Heap::kNullValueRootIndex);
   2619     __ Branch(if_true, eq, v0, Operand(at));
   2620     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
   2621     __ GetObjectType(v0, v0, a1);
   2622     __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
   2623     // Check for callable or undetectable objects => false.
   2624     __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
   2625     __ And(a1, a1,
   2626            Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
   2627     Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
   2628   } else {
   2629     if (if_false != fall_through) __ jmp(if_false);
   2630   }
   2631   context()->Plug(if_true, if_false);
   2632 }
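// Note on the typeof dispatch above: a comparison of the form
// `typeof x == "<literal>"` is resolved against the literal at compile
// time, so only the checks for that one string are emitted; an
// unrecognized literal falls through to the final else and jumps straight
// to if_false. Roughly, the emitted checks encode (illustrative JS, not
// defined in this file):
//
//   typeof 1 === "number"      // true: smi, or heap object with HeapNumber map
//   typeof null === "object"   // true: the explicit null check in the
//                              //       object_string branch
//   typeof f === "function"    // true when f's map is callable and not
//                              //       undetectable
//   typeof x === "undefined"   // true for undefined and for undetectable
//                              //       host objects (e.g. document.all)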
   2633 
   2634 
   2635 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
   2636   Comment cmnt(masm_, "[ CompareOperation");
   2637 
   2638   // First we try a fast inlined version of the compare when one of
   2639   // the operands is a literal.
   2640   if (TryLiteralCompare(expr)) return;
   2641 
   2642   // Always perform the comparison for its control flow.  Pack the result
   2643   // into the expression's context after the comparison is performed.
   2644   Label materialize_true, materialize_false;
   2645   Label* if_true = NULL;
   2646   Label* if_false = NULL;
   2647   Label* fall_through = NULL;
   2648   context()->PrepareTest(&materialize_true, &materialize_false,
   2649                          &if_true, &if_false, &fall_through);
   2650 
   2651   Token::Value op = expr->op();
   2652   VisitForStackValue(expr->left());
   2653   switch (op) {
   2654     case Token::IN:
   2655       VisitForStackValue(expr->right());
   2656       SetExpressionPosition(expr);
   2657       EmitHasProperty();
   2658       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
   2659       __ LoadRoot(t0, Heap::kTrueValueRootIndex);
   2660       Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
   2661       break;
   2662 
   2663     case Token::INSTANCEOF: {
   2664       VisitForAccumulatorValue(expr->right());
   2665       SetExpressionPosition(expr);
   2666       __ mov(a0, result_register());
   2667       PopOperand(a1);
   2668       __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
   2669       RestoreContext();
   2670       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
   2671       __ LoadRoot(at, Heap::kTrueValueRootIndex);
   2672       Split(eq, v0, Operand(at), if_true, if_false, fall_through);
   2673       break;
   2674     }
   2675 
   2676     default: {
   2677       VisitForAccumulatorValue(expr->right());
   2678       SetExpressionPosition(expr);
   2679       Condition cc = CompareIC::ComputeCondition(op);
   2680       __ mov(a0, result_register());
   2681       PopOperand(a1);
   2682 
   2683       bool inline_smi_code = ShouldInlineSmiCase(op);
   2684       JumpPatchSite patch_site(masm_);
   2685       if (inline_smi_code) {
   2686         Label slow_case;
   2687         __ Or(a2, a0, Operand(a1));
   2688         patch_site.EmitJumpIfNotSmi(a2, &slow_case);
   2689         Split(cc, a1, Operand(a0), if_true, if_false, NULL);
   2690         __ bind(&slow_case);
   2691       }
   2692 
   2693       Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
   2694       CallIC(ic, expr->CompareOperationFeedbackId());
   2695       patch_site.EmitPatchInfo();
   2696       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2697       Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
   2698     }
   2699   }
   2700 
   2701   // Convert the result of the comparison into one expected for this
   2702   // expression's context.
   2703   context()->Plug(if_true, if_false);
   2704 }
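// Note on the default arm above: `Or(a2, a0, a1)` folds both operands into
// one register so a single smi-tag test covers them, and that test sits in
// a JumpPatchSite marker the CompareIC can later patch (see the class
// comment near the top of this file). The final Split compares v0 against
// zero_reg with the same condition cc, which assumes the CompareIC returns
// a value whose ordering against zero encodes the comparison result.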
   2705 
   2706 
   2707 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
   2708                                               Expression* sub_expr,
   2709                                               NilValue nil) {
   2710   Label materialize_true, materialize_false;
   2711   Label* if_true = NULL;
   2712   Label* if_false = NULL;
   2713   Label* fall_through = NULL;
   2714   context()->PrepareTest(&materialize_true, &materialize_false,
   2715                          &if_true, &if_false, &fall_through);
   2716 
   2717   VisitForAccumulatorValue(sub_expr);
   2718   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2719   if (expr->op() == Token::EQ_STRICT) {
   2720     Heap::RootListIndex nil_value = nil == kNullValue ?
   2721         Heap::kNullValueRootIndex :
   2722         Heap::kUndefinedValueRootIndex;
   2723     __ LoadRoot(a1, nil_value);
   2724     Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
   2725   } else {
   2726     __ JumpIfSmi(v0, if_false);
   2727     __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
   2728     __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
   2729     __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
   2730     Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
   2731   }
   2732   context()->Plug(if_true, if_false);
   2733 }
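// Note on EmitLiteralCompareNil above: the strict case is a plain identity
// check against the null or undefined root, while the sloppy case
// (`x == null`, `x == undefined`) is implemented entirely as a
// Map::kIsUndetectable test, which relies on the null and undefined
// oddball maps carrying that bit just as undetectable host objects do.
// Illustrative JS:
//
//   null === null        // true  (root identity)
//   undefined == null    // true  (undetectable bit set on its map)
//   0 == null            // false (smi path jumps to if_false)
//   ({}) == null         // false (ordinary maps have the bit clear)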
   2734 
   2735 
   2736 Register FullCodeGenerator::result_register() {
   2737   return v0;
   2738 }
   2739 
   2740 
   2741 Register FullCodeGenerator::context_register() {
   2742   return cp;
   2743 }
   2744 
   2745 void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
   2746   DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
   2747   __ lw(value, MemOperand(fp, frame_offset));
   2748 }
   2749 
   2750 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
   2751   DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
   2752   __ sw(value, MemOperand(fp, frame_offset));
   2753 }
   2754 
   2755 
   2756 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
   2757   __ lw(dst, ContextMemOperand(cp, context_index));
   2758 }
   2759 
   2760 
   2761 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
   2762   DeclarationScope* closure_scope = scope()->GetClosureScope();
   2763   if (closure_scope->is_script_scope() ||
   2764       closure_scope->is_module_scope()) {
   2765     // Contexts nested in the native context have a canonical empty function
   2766     // as their closure, not the anonymous closure containing the global
   2767     // code.
   2768     __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
   2769   } else if (closure_scope->is_eval_scope()) {
   2770     // Contexts created by a call to eval have the same closure as the
   2771     // context calling eval, not the anonymous closure containing the eval
   2772     // code.  Fetch it from the context.
   2773     __ lw(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
   2774   } else {
   2775     DCHECK(closure_scope->is_function_scope());
   2776     __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   2777   }
   2778   PushOperand(at);
   2779 }
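// Note on PushFunctionArgumentForContextAllocation above: as the name
// suggests, the pushed value is used as the closure when a new context is
// allocated; the three branches pick whichever function object is the
// correct closure for the current scope kind, as described in the comments
// on each branch.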
   2780 
   2781 
   2782 #undef __
   2783 
   2784 
   2785 void BackEdgeTable::PatchAt(Code* unoptimized_code,
   2786                             Address pc,
   2787                             BackEdgeState target_state,
   2788                             Code* replacement_code) {
   2789   static const int kInstrSize = Assembler::kInstrSize;
   2790   Address pc_immediate_load_address =
   2791       Assembler::target_address_from_return_address(pc);
   2792   Address branch_address = pc_immediate_load_address - 2 * kInstrSize;
   2793   Isolate* isolate = unoptimized_code->GetIsolate();
   2794   CodePatcher patcher(isolate, branch_address, 1);
   2795 
   2796   switch (target_state) {
   2797     case INTERRUPT:
   2798       // slt at, a3, zero_reg (in case of count based interrupts)
   2799       // beq at, zero_reg, ok
   2800       // lui t9, <interrupt stub address> upper
   2801       // ori t9, <interrupt stub address> lower
   2802       // jalr t9
   2803       // nop
   2804       // ok-label ----- pc_after points here
   2805       patcher.masm()->slt(at, a3, zero_reg);
   2806       break;
   2807     case ON_STACK_REPLACEMENT:
   2808       // addiu at, zero_reg, 1
   2809       // beq at, zero_reg, ok  ;; Not changed
   2810       // lui t9, <on-stack replacement address> upper
   2811       // ori t9, <on-stack replacement address> lower
   2812       // jalr t9  ;; Not changed
   2813       // nop  ;; Not changed
   2814       // ok-label ----- pc_after points here
   2815       patcher.masm()->addiu(at, zero_reg, 1);
   2816       break;
   2817   }
   2818   // Replace the stack check address in the load-immediate (lui/ori pair)
   2819   // with the entry address of the replacement code.
   2820   Assembler::set_target_address_at(isolate, pc_immediate_load_address,
   2821                                    replacement_code->entry());
   2822 
   2823   unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
   2824       unoptimized_code, pc_immediate_load_address, replacement_code);
   2825 }
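// Note on PatchAt above: only two things change at a back edge, the single
// instruction that computes the branch condition and the lui/ori pair
// holding the call target. With `addiu at, zero_reg, 1` the following
// `beq at, zero_reg, ok` can never be taken, so the jalr always reaches the
// on-stack replacement builtin; with `slt at, a3, zero_reg` it is reached
// only when the counter value in a3 has gone negative. The beq/jalr/nop
// instructions themselves are never rewritten, which is what lets
// GetBackEdgeState below recover the state from the condition instruction
// alone.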
   2826 
   2827 
   2828 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
   2829     Isolate* isolate,
   2830     Code* unoptimized_code,
   2831     Address pc) {
   2832   static const int kInstrSize = Assembler::kInstrSize;
   2833   Address pc_immediate_load_address =
   2834       Assembler::target_address_from_return_address(pc);
   2835   Address branch_address = pc_immediate_load_address - 2 * kInstrSize;
   2836 
   2837   DCHECK(Assembler::IsBeq(Assembler::instr_at(branch_address + kInstrSize)));
   2838   if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
   2839     DCHECK(reinterpret_cast<uint32_t>(
   2840         Assembler::target_address_at(pc_immediate_load_address)) ==
   2841            reinterpret_cast<uint32_t>(
   2842                isolate->builtins()->InterruptCheck()->entry()));
   2843     return INTERRUPT;
   2844   }
   2845 
   2846   DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
   2847 
   2848   DCHECK(reinterpret_cast<uint32_t>(
   2849              Assembler::target_address_at(pc_immediate_load_address)) ==
   2850          reinterpret_cast<uint32_t>(
   2851              isolate->builtins()->OnStackReplacement()->entry()));
   2852   return ON_STACK_REPLACEMENT;
   2853 }
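// Note on GetBackEdgeState above: the state is derived purely from whether
// the condition slot still holds an add-immediate (ON_STACK_REPLACEMENT) or
// not (INTERRUPT); the DCHECKs only cross-check that the lui/ori target
// currently points at the matching builtin entry.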
   2854 
   2855 
   2856 }  // namespace internal
   2857 }  // namespace v8
   2858 
   2859 #endif  // V8_TARGET_ARCH_MIPS
   2860