// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/full-codegen/full-codegen.h"
#include "src/ast/compile-time-value.h"
#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compilation-info.h"
#include "src/compiler.h"
#include "src/debug/debug.h"
#include "src/ic/ic.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

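// A JumpPatchSite records the position of an inlined smi check so that the
// IC system can later patch the jump in place. Before patching, the jumps
// branch on the carry flag, which testb never sets:
//
//   testb reg, Immediate(kSmiTagMask)
//   jnc target   // always taken until patched to jnz
//   jc  target   // never taken until patched to jz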
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, near_jump);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, near_jump);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
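      // Embed the delta in the 8-bit immediate of a testl on rax. The
      // instruction has no observable effect here but lets the IC patching
      // code locate the inlined smi check.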
      __ testl(rax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, near_jump);
  }

  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called (i.e. ourselves)
//   o rdx: the new target value
//   o rsi: our context
//   o rbp: our caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  DCHECK_EQ(scope(), info->scope());
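  // The profiling counter holds the remaining interrupt budget. It is
  // decremented at back edges and returns; once it goes negative, a call
  // to the InterruptCheck builtin is emitted.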
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
    __ movp(rcx, args.GetReceiverOperand());
    __ AssertNotSmi(rcx);
    __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rcx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  // Increment invocation count for the function.
  {
    Comment cmnt(masm_, "[ Increment invocation count");
    __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
    __ movp(rcx, FieldOperand(rcx, LiteralsArray::kFeedbackVectorOffset));
    __ SmiAddConstant(
        FieldOperand(rcx,
                     TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
                         TypeFeedbackVector::kHeaderSize),
        Smi::FromInt(1));
  }

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count == 1) {
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        Label ok;
        __ movp(rcx, rsp);
        __ subp(rcx, Immediate(locals_count * kPointerSize));
        __ CompareRoot(rcx, Heap::kRealStackLimitRootIndex);
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
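      // Initialize locals in batches of kMaxPushes to bound code size: a
      // counted loop emits whole batches, followed by the remaining pushes.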
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ movp(rcx, Immediate(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ Push(rax);
        }
        // Continue loop if not done.
        __ decp(rcx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ Push(rax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->NeedsContext()) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in rdi.
    if (info->scope()->is_script_scope()) {
      __ Push(rdi);
      __ Push(info->scope()->scope_info());
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used; clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ Push(rdx);  // Preserve new target.
      }
      if (slots <= FastNewFunctionContextStub::kMaximumSlots) {
        FastNewFunctionContextStub stub(isolate());
        __ Set(FastNewFunctionContextDescriptor::SlotsRegister(), slots);
        __ CallStub(&stub);
        // Result of FastNewFunctionContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ Push(rdi);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ Pop(rdx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in rax.  It replaces the context passed to us.
    // It's saved on the stack and kept live in rsi.
    __ movp(rsi, rax);
    __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);

    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var =
          (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movp(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ movp(Operand(rsi, context_offset), rax);
        // Update the write barrier.  This clobbers rax and rbx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              rsi, context_offset, rax, rbx, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // if we bail out here. But since that can happen only when the new target
  // is not used and we allocate a context, the value of |function_in_register|
  // is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = info->scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, rdi, rbx, rcx);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = info->scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, rdx, rbx, rcx);
  }

  // Possibly allocate a rest parameter array.
  Variable* rest_param = info->scope()->rest_parameter();
  if (rest_param != nullptr) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register) {
      __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register = false;
    SetVar(rest_param, rax, rbx, rdx);
  }

  // Possibly allocate an arguments object.
  DCHECK_EQ(scope(), info->scope());
  Variable* arguments = info->scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(rdi);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, rax, rbx, rdx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(info->scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Set(rax, 0);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
                    Smi::FromInt(-delta));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ Move(kScratchRegister, Smi::FromInt(reset_value));
  __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
}


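// Expected byte length of the interrupt-check sequence emitted below in
// EmitBackEdgeBookkeeping; it differs between the 64-bit and x32 ABIs and
// is asserted via PredictableCodeSizeScope so that back-edge patching can
// rely on the code layout.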
static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);

  __ j(positive, &ok, Label::kNear);
  {
    PredictableCodeSizeScope predictable_code_size_scope(masm_, kJnsOffset);
    DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
    __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

    // Record a mapping of this PC offset to the OSR id.  This is used to find
    // the AST id from the unoptimized code in order to use it as a key into
    // the deoptimization input data found in the optimized code.
    RecordBackEdge(stmt->OsrEntryId());

    EmitProfilingCounterReset();
  }
  __ bind(&ok);

  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ j(positive, &ok, Label::kNear);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ Push(rax);
  }
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ Pop(rax);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ Push(rax);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    __ leave();

    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
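    // Return and drop the receiver plus arguments from the caller's stack;
    // rcx may be used as a scratch register to hold the return address
    // while rsp is adjusted.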
    __ Ret(arguments_bytes, rcx);
  }
}

void FullCodeGenerator::RestoreContext() {
  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  codegen()->PushOperand(operand);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  codegen()->OperandStackDepthIncrement(1);
  __ PushRoot(index);
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Smi::cast(*lit));
  } else {
    __ Move(result_register(), lit);
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  codegen()->OperandStackDepthIncrement(1);
  if (lit->IsSmi()) {
    __ SafePush(Smi::cast(*lit));
  } else {
    __ Push(lit);
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
         !lit->IsUndetectable());
  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
      lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Move(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ movp(Operand(rsp, 0), reg);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Move(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Move(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ Push(isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Push(isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  codegen()->OperandStackDepthIncrement(1);
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ PushRoot(value_root_index);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
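  // The ToBoolean IC normalizes the condition value to either the true or
  // the false root value, so one comparison against the true root decides
  // the branch.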
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(equal, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
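  // Emit the minimum number of jumps: if either target is the fall-through
  // position, a single conditional jump (on the condition or its negation)
  // suffices.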
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += kFPOnStackSize + kPCOnStackSize +
              (info_->scope()->num_parameters() - 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(rbp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ movp(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ movp(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
    __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
    __ Check(not_equal, kDeclarationInWithContext);
    __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      DCHECK(!variable->binding_needs_init());
      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movp(StackOperand(variable), kScratchRegister);
      }
      break;

    case VariableLocation::CONTEXT:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      DCHECK_EQ(VAR, variable->mode());
      DCHECK(!variable->binding_needs_init());
      __ Push(variable->name());
      __ CallRuntime(Runtime::kDeclareEvalVar);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ movp(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ movp(ContextOperand(rsi, variable->index()), result_register());
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(rsi,
                                offset,
                                result_register(),
                                rcx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      PushOperand(variable->name());
      VisitForStackValue(declaration->fun());
      CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ EmitLoadTypeFeedbackVector(rax);
  __ Push(rax);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as the final fall-through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ movp(rdx, Operand(rsp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ movp(rcx, rdx);
      __ orp(rcx, rax);
      patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);

      __ cmpp(rdx, rax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

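    // The CompareIC returns a smi in rax; zero means the operands were
    // strictly equal.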
    __ testp(rax, rax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);
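  // The five stack slots hold, from bottom to top: the enumerable object,
  // its expected map (or Smi(1) marking the slow path), the enum cache or
  // fixed array of keys, the number of keys, and the current index.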

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert, Label::kNear);
  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  __ j(equal, &exit);
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, &exit);
  __ bind(&convert);
  __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
  RestoreContext();
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ Push(rax);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so they will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ Push(rax);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kMetaMapRootIndex);
  __ j(not_equal, &fixed_array);

  // We got a map in register rax. Get the enumeration cache from it.
  __ bind(&use_cache);

  Label no_descriptors;

  __ EnumLength(rdx, rax);
  __ Cmp(rdx, Smi::kZero);
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(rax, rcx);
  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ Push(rax);  // Map.
  __ Push(rcx);  // Enumeration cache.
  __ Push(rdx);  // Number of valid entries for the map in the enum cache.
  __ Push(Smi::kZero);  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ addp(rsp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register rax. Iterate through that.
  __ bind(&fixed_array);

  __ movp(rcx, Operand(rsp, 0 * kPointerSize));  // Get enumerated object
  __ Push(Smi::FromInt(1));                      // Smi(1) indicates slow check
  __ Push(rax);  // Array
  __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
  __ Push(rax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ Push(Smi::kZero);  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ movp(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
  __ cmpp(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register rax.
  __ movp(rbx, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ movp(rax,
          FieldOperand(rbx, index.reg, index.scale, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register rdx.
  __ movp(rdx, Operand(rsp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ movp(rbx, Operand(rsp, 4 * kPointerSize));
  __ cmpp(rdx, FieldOperand(rbx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // We need to filter the key; record the slow path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(rdx);
  __ Move(FieldOperand(rdx, FixedArray::OffsetOfElementAt(vector_index)),
          TypeFeedbackVector::MegamorphicSentinel(isolate()));

  // rax contains the key. The receiver in rbx is the second argument to
  // ForInFilter. ForInFilter returns undefined if the receiver doesn't
  // have the key, and the name-converted key otherwise.
  __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ JumpIfRoot(result_register(), Heap::kUndefinedValueRootIndex,
                loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register rax.
  __ bind(&update_each);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
  __ movp(StoreDescriptor::ValueRegister(),
          Operand(rsp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ movp(StoreDescriptor::ReceiverRegister(), rax);
  __ movp(StoreDescriptor::ValueRegister(),
          Operand(rsp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register context = rsi;
  Register temp = rdx;

  int to_check = scope()->ContextChainLengthUntilOutermostSloppyEval();
  for (Scope* s = scope(); to_check > 0; s = s->outer_scope()) {
    if (!s->NeedsContext()) continue;
    if (s->calls_sloppy_eval()) {
      // Check that extension is "the hole".
      __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                       Heap::kTheHoleValueRootIndex, slow);
    }
    // Load next context in chain.
    __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
    // Walk the rest of the chain without clobbering rsi.
    context = temp;
    to_check--;
  }

  // All extension objects were empty and it is safe to use the normal global
  // load machinery.
   1165   EmitGlobalVariableLoad(proxy, typeof_mode);
   1166 }
   1167 
   1168 
   1169 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
   1170                                                                 Label* slow) {
   1171   DCHECK(var->IsContextSlot());
   1172   Register context = rsi;
   1173   Register temp = rbx;
   1174 
   1175   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
   1176     if (s->NeedsContext()) {
   1177       if (s->calls_sloppy_eval()) {
   1178         // Check that extension is "the hole".
   1179         __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
   1180                          Heap::kTheHoleValueRootIndex, slow);
   1181       }
   1182       __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
   1183       // Walk the rest of the chain without clobbering rsi.
   1184       context = temp;
   1185     }
   1186   }
   1187   // Check that last extension is "the hole".
   1188   __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
   1189                    Heap::kTheHoleValueRootIndex, slow);
   1190 
   1191   // This function is used only for loads, not stores, so it's safe to
   1192   // return an rsi-based operand (the write barrier cannot be allowed to
   1193   // destroy the rsi register).
   1194   return ContextOperand(context, var->index());
   1195 }
   1196 
   1197 
   1198 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
   1199                                                   TypeofMode typeof_mode,
   1200                                                   Label* slow, Label* done) {
   1201   // Generate fast-case code for variables that might be shadowed by
   1202   // eval-introduced variables.  Eval is used a lot without
   1203   // introducing variables.  In those cases, we do not want to
   1204   // perform a runtime call for all variables in the scope
   1205   // containing the eval.
   1206   Variable* var = proxy->var();
   1207   if (var->mode() == DYNAMIC_GLOBAL) {
   1208     EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
   1209     __ jmp(done);
   1210   } else if (var->mode() == DYNAMIC_LOCAL) {
   1211     Variable* local = var->local_if_not_shadowed();
   1212     __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
   1213     if (local->binding_needs_init()) {
   1214       __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
   1215       __ j(not_equal, done);
   1216       __ Push(var->name());
   1217       __ CallRuntime(Runtime::kThrowReferenceError);
   1218     } else {
   1219       __ jmp(done);
   1220     }
   1221   }
   1222 }
   1223 
   1224 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
   1225                                          TypeofMode typeof_mode) {
   1226   // Record position before possible IC call.
   1227   SetExpressionPosition(proxy);
   1228   PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
   1229   Variable* var = proxy->var();
   1230 
   1231   // Three cases: global variables, lookup variables, and all other types of
   1232   // variables.
   1233   switch (var->location()) {
   1234     case VariableLocation::UNALLOCATED: {
   1235       Comment cmnt(masm_, "[ Global variable");
   1236       EmitGlobalVariableLoad(proxy, typeof_mode);
   1237       context()->Plug(rax);
   1238       break;
   1239     }
   1240 
   1241     case VariableLocation::PARAMETER:
   1242     case VariableLocation::LOCAL:
   1243     case VariableLocation::CONTEXT: {
   1244       DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
   1245       Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
   1246                                                : "[ Stack slot");
   1247       if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
   1248         // Throw a reference error when using an uninitialized let/const
   1249         // binding in harmony mode.
   1250         DCHECK(IsLexicalVariableMode(var->mode()));
   1251         Label done;
   1252         GetVar(rax, var);
   1253         __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
   1254         __ j(not_equal, &done, Label::kNear);
   1255         __ Push(var->name());
   1256         __ CallRuntime(Runtime::kThrowReferenceError);
   1257         __ bind(&done);
   1258         context()->Plug(rax);
   1259         break;
   1260       }
   1261       context()->Plug(var);
   1262       break;
   1263     }
   1264 
   1265     case VariableLocation::LOOKUP: {
   1266       Comment cmnt(masm_, "[ Lookup slot");
   1267       Label done, slow;
   1268       // Generate code for loading from variables potentially shadowed
   1269       // by eval-introduced variables.
   1270       EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
   1271       __ bind(&slow);
   1272       __ Push(var->name());
   1273       Runtime::FunctionId function_id =
   1274           typeof_mode == NOT_INSIDE_TYPEOF
   1275               ? Runtime::kLoadLookupSlot
   1276               : Runtime::kLoadLookupSlotInsideTypeof;
   1277       __ CallRuntime(function_id);
   1278       __ bind(&done);
   1279       context()->Plug(rax);
   1280       break;
   1281     }
   1282 
   1283     case VariableLocation::MODULE:
   1284       UNREACHABLE();
   1285   }
   1286 }
   1287 
   1288 
   1289 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
   1290   Expression* expression = (property == NULL) ? NULL : property->value();
   1291   if (expression == NULL) {
   1292     OperandStackDepthIncrement(1);
   1293     __ PushRoot(Heap::kNullValueRootIndex);
   1294   } else {
   1295     VisitForStackValue(expression);
   1296     if (NeedsHomeObject(expression)) {
   1297       DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
   1298              property->kind() == ObjectLiteral::Property::SETTER);
   1299       int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
   1300       EmitSetHomeObject(expression, offset, property->GetSlot());
   1301     }
   1302   }
   1303 }
   1304 
   1305 
   1306 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   1307   Comment cmnt(masm_, "[ ObjectLiteral");
   1308 
   1309   Handle<FixedArray> constant_properties = expr->constant_properties();
   1310   int flags = expr->ComputeFlags();
   1311   if (MustCreateObjectLiteralWithRuntime(expr)) {
   1312     __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1313     __ Push(Smi::FromInt(expr->literal_index()));
   1314     __ Push(constant_properties);
   1315     __ Push(Smi::FromInt(flags));
   1316     __ CallRuntime(Runtime::kCreateObjectLiteral);
   1317   } else {
   1318     __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1319     __ Move(rbx, Smi::FromInt(expr->literal_index()));
   1320     __ Move(rcx, constant_properties);
   1321     __ Move(rdx, Smi::FromInt(flags));
   1322     FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
   1323     __ CallStub(&stub);
   1324     RestoreContext();
   1325   }
   1326   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
   1327 
   1328   // If result_saved is true the result is on top of the stack.  If
   1329   // result_saved is false the result is in rax.
   1330   bool result_saved = false;
   1331 
   1332   AccessorTable accessor_table(zone());
   1333   int property_index = 0;
   1334   for (; property_index < expr->properties()->length(); property_index++) {
   1335     ObjectLiteral::Property* property = expr->properties()->at(property_index);
   1336     if (property->is_computed_name()) break;
   1337     if (property->IsCompileTimeValue()) continue;
   1338 
   1339     Literal* key = property->key()->AsLiteral();
   1340     Expression* value = property->value();
   1341     if (!result_saved) {
   1342       PushOperand(rax);  // Save result on the stack
   1343       result_saved = true;
   1344     }
   1345     switch (property->kind()) {
   1346       case ObjectLiteral::Property::CONSTANT:
   1347         UNREACHABLE();
   1348       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
   1349         DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
   1350         // Fall through.
   1351       case ObjectLiteral::Property::COMPUTED:
   1352         // It is safe to use [[Put]] here because the boilerplate already
   1353         // contains computed properties with an uninitialized value.
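                // For example, for { x: foo() } the boilerplate already owns an
                // uninitialized slot for x, so a plain store fills it in.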
   1354         if (key->IsStringLiteral()) {
   1355           DCHECK(key->IsPropertyName());
   1356           if (property->emit_store()) {
   1357             VisitForAccumulatorValue(value);
   1358             DCHECK(StoreDescriptor::ValueRegister().is(rax));
   1359             __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
   1360             CallStoreIC(property->GetSlot(0), key->value());
   1361             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
   1362 
   1363             if (NeedsHomeObject(value)) {
   1364               EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
   1365             }
   1366           } else {
   1367             VisitForEffect(value);
   1368           }
   1369           break;
   1370         }
   1371         PushOperand(Operand(rsp, 0));  // Duplicate receiver.
   1372         VisitForStackValue(key);
   1373         VisitForStackValue(value);
   1374         if (property->emit_store()) {
   1375           if (NeedsHomeObject(value)) {
   1376             EmitSetHomeObject(value, 2, property->GetSlot());
   1377           }
   1378           PushOperand(Smi::FromInt(SLOPPY));  // Language mode
   1379           CallRuntimeWithOperands(Runtime::kSetProperty);
   1380         } else {
   1381           DropOperands(3);
   1382         }
   1383         break;
   1384       case ObjectLiteral::Property::PROTOTYPE:
   1385         PushOperand(Operand(rsp, 0));  // Duplicate receiver.
   1386         VisitForStackValue(value);
   1387         DCHECK(property->emit_store());
   1388         CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
   1389         PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
   1390                                BailoutState::NO_REGISTERS);
   1391         break;
   1392       case ObjectLiteral::Property::GETTER:
   1393         if (property->emit_store()) {
   1394           AccessorTable::Iterator it = accessor_table.lookup(key);
   1395           it->second->bailout_id = expr->GetIdForPropertySet(property_index);
   1396           it->second->getter = property;
   1397         }
   1398         break;
   1399       case ObjectLiteral::Property::SETTER:
   1400         if (property->emit_store()) {
   1401           AccessorTable::Iterator it = accessor_table.lookup(key);
   1402           it->second->bailout_id = expr->GetIdForPropertySet(property_index);
   1403           it->second->setter = property;
   1404         }
   1405         break;
   1406     }
   1407   }
   1408 
   1409   // Emit code to define accessors, using only a single call to the runtime for
   1410   // each pair of corresponding getters and setters.
   1411   for (AccessorTable::Iterator it = accessor_table.begin();
   1412        it != accessor_table.end();
   1413        ++it) {
   1414     PushOperand(Operand(rsp, 0));  // Duplicate receiver.
   1415     VisitForStackValue(it->first);
   1416     EmitAccessor(it->second->getter);
   1417     EmitAccessor(it->second->setter);
   1418     PushOperand(Smi::FromInt(NONE));
   1419     CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
   1420     PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
   1421   }
   1422 
   1423   // Object literals have two parts. The "static" part on the left contains no
   1424   // computed property names, and so we can compute its map ahead of time; see
   1425   // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
   1426   // starts with the first computed property name, and continues with all
   1427   // properties to its right.  All the code from above initializes the static
   1428   // component of the object literal, and arranges for the map of the result to
   1429   // reflect the static order in which the keys appear. For the dynamic
   1430   // properties, we compile them into a series of "SetOwnProperty" runtime
   1431   // calls. This will preserve insertion order.
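          // For example, in { a: 0, [k]: 1, b: 2 } the property 'a' belongs to
          // the static part, while [k] and everything after it (including 'b')
          // is defined dynamically, one runtime call per property.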
   1432   for (; property_index < expr->properties()->length(); property_index++) {
   1433     ObjectLiteral::Property* property = expr->properties()->at(property_index);
   1434 
   1435     Expression* value = property->value();
   1436     if (!result_saved) {
   1437       PushOperand(rax);  // Save result on the stack
   1438       result_saved = true;
   1439     }
   1440 
   1441     PushOperand(Operand(rsp, 0));  // Duplicate receiver.
   1442 
   1443     if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
   1444       DCHECK(!property->is_computed_name());
   1445       VisitForStackValue(value);
   1446       DCHECK(property->emit_store());
   1447       CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
   1448       PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
   1449                              BailoutState::NO_REGISTERS);
   1450     } else {
   1451       EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
   1452       VisitForStackValue(value);
   1453       if (NeedsHomeObject(value)) {
   1454         EmitSetHomeObject(value, 2, property->GetSlot());
   1455       }
   1456 
   1457       switch (property->kind()) {
   1458         case ObjectLiteral::Property::CONSTANT:
   1459         case ObjectLiteral::Property::MATERIALIZED_LITERAL:
   1460         case ObjectLiteral::Property::COMPUTED:
   1461           if (property->emit_store()) {
   1462             PushOperand(Smi::FromInt(NONE));
   1463             PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
   1464             CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
   1465             PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
   1466                                    BailoutState::NO_REGISTERS);
   1467           } else {
   1468             DropOperands(3);
   1469           }
   1470           break;
   1471 
   1472         case ObjectLiteral::Property::PROTOTYPE:
   1473           UNREACHABLE();
   1474           break;
   1475 
   1476         case ObjectLiteral::Property::GETTER:
   1477           PushOperand(Smi::FromInt(NONE));
   1478           CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
   1479           break;
   1480 
   1481         case ObjectLiteral::Property::SETTER:
   1482           PushOperand(Smi::FromInt(NONE));
   1483           CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
   1484           break;
   1485       }
   1486     }
   1487   }
   1488 
   1489   if (result_saved) {
   1490     context()->PlugTOS();
   1491   } else {
   1492     context()->Plug(rax);
   1493   }
   1494 }
   1495 
   1496 
   1497 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   1498   Comment cmnt(masm_, "[ ArrayLiteral");
   1499 
   1500   Handle<FixedArray> constant_elements = expr->constant_elements();
   1501   bool has_constant_fast_elements =
   1502       IsFastObjectElementsKind(expr->constant_elements_kind());
   1503 
   1504   AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
   1505   if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
   1506     // If the only customer of allocation sites is transitioning, then
   1507     // The only remaining use of allocation sites here is element-kind
   1508     // transitioning, and there is no more general kind to transition to.
   1509   }
   1510 
   1511   if (MustCreateArrayLiteralWithRuntime(expr)) {
   1512     __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1513     __ Push(Smi::FromInt(expr->literal_index()));
   1514     __ Push(constant_elements);
   1515     __ Push(Smi::FromInt(expr->ComputeFlags()));
   1516     __ CallRuntime(Runtime::kCreateArrayLiteral);
   1517   } else {
   1518     __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1519     __ Move(rbx, Smi::FromInt(expr->literal_index()));
   1520     __ Move(rcx, constant_elements);
   1521     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
   1522     __ CallStub(&stub);
   1523     RestoreContext();
   1524   }
   1525   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
   1526 
   1527   bool result_saved = false;  // Is the result saved to the stack?
   1528   ZoneList<Expression*>* subexprs = expr->values();
   1529   int length = subexprs->length();
   1530 
   1531   // Emit code to evaluate all the non-constant subexpressions and to store
   1532   // them into the newly cloned array.
   1533   for (int array_index = 0; array_index < length; array_index++) {
   1534     Expression* subexpr = subexprs->at(array_index);
   1535     DCHECK(!subexpr->IsSpread());
   1536 
   1537     // If the subexpression is a literal or a simple materialized literal it
   1538     // is already set in the cloned array.
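            // For example, in [1, 2, foo()] the elements 1 and 2 come from the
            // boilerplate; only foo() needs the explicit keyed store below.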
   1539     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
   1540 
   1541     if (!result_saved) {
   1542       PushOperand(rax);  // array literal
   1543       result_saved = true;
   1544     }
   1545     VisitForAccumulatorValue(subexpr);
   1546 
   1547     __ Move(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
   1548     __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
   1549     CallKeyedStoreIC(expr->LiteralFeedbackSlot());
   1550 
   1551     PrepareForBailoutForId(expr->GetIdForElement(array_index),
   1552                            BailoutState::NO_REGISTERS);
   1553   }
   1554 
   1555   if (result_saved) {
   1556     context()->PlugTOS();
   1557   } else {
   1558     context()->Plug(rax);
   1559   }
   1560 }
   1561 
   1562 
   1563 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
   1564   DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
   1565 
   1566   Comment cmnt(masm_, "[ Assignment");
   1567 
   1568   Property* property = expr->target()->AsProperty();
   1569   LhsKind assign_type = Property::GetAssignType(property);
   1570 
   1571   // Evaluate LHS expression.
   1572   switch (assign_type) {
   1573     case VARIABLE:
   1574       // Nothing to do here.
   1575       break;
   1576     case NAMED_PROPERTY:
   1577       if (expr->is_compound()) {
   1578         // We need the receiver both on the stack and in the register.
   1579         VisitForStackValue(property->obj());
   1580         __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
   1581       } else {
   1582         VisitForStackValue(property->obj());
   1583       }
   1584       break;
   1585     case NAMED_SUPER_PROPERTY:
   1586       VisitForStackValue(
   1587           property->obj()->AsSuperPropertyReference()->this_var());
   1588       VisitForAccumulatorValue(
   1589           property->obj()->AsSuperPropertyReference()->home_object());
   1590       PushOperand(result_register());
   1591       if (expr->is_compound()) {
   1592         PushOperand(MemOperand(rsp, kPointerSize));
   1593         PushOperand(result_register());
   1594       }
   1595       break;
   1596     case KEYED_SUPER_PROPERTY:
   1597       VisitForStackValue(
   1598           property->obj()->AsSuperPropertyReference()->this_var());
   1599       VisitForStackValue(
   1600           property->obj()->AsSuperPropertyReference()->home_object());
   1601       VisitForAccumulatorValue(property->key());
   1602       PushOperand(result_register());
   1603       if (expr->is_compound()) {
   1604         PushOperand(MemOperand(rsp, 2 * kPointerSize));
   1605         PushOperand(MemOperand(rsp, 2 * kPointerSize));
   1606         PushOperand(result_register());
   1607       }
   1608       break;
   1609     case KEYED_PROPERTY: {
   1610       if (expr->is_compound()) {
   1611         VisitForStackValue(property->obj());
   1612         VisitForStackValue(property->key());
   1613         __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
   1614         __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
   1615       } else {
   1616         VisitForStackValue(property->obj());
   1617         VisitForStackValue(property->key());
   1618       }
   1619       break;
   1620     }
   1621   }
   1622 
   1623   // For compound assignments we need another deoptimization point after the
   1624   // variable/property load.
   1625   if (expr->is_compound()) {
   1626     { AccumulatorValueContext context(this);
   1627       switch (assign_type) {
   1628         case VARIABLE:
   1629           EmitVariableLoad(expr->target()->AsVariableProxy());
   1630           PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
   1631           break;
   1632         case NAMED_PROPERTY:
   1633           EmitNamedPropertyLoad(property);
   1634           PrepareForBailoutForId(property->LoadId(),
   1635                                  BailoutState::TOS_REGISTER);
   1636           break;
   1637         case NAMED_SUPER_PROPERTY:
   1638           EmitNamedSuperPropertyLoad(property);
   1639           PrepareForBailoutForId(property->LoadId(),
   1640                                  BailoutState::TOS_REGISTER);
   1641           break;
   1642         case KEYED_SUPER_PROPERTY:
   1643           EmitKeyedSuperPropertyLoad(property);
   1644           PrepareForBailoutForId(property->LoadId(),
   1645                                  BailoutState::TOS_REGISTER);
   1646           break;
   1647         case KEYED_PROPERTY:
   1648           EmitKeyedPropertyLoad(property);
   1649           PrepareForBailoutForId(property->LoadId(),
   1650                                  BailoutState::TOS_REGISTER);
   1651           break;
   1652       }
   1653     }
   1654 
   1655     Token::Value op = expr->binary_op();
   1656     PushOperand(rax);  // Left operand goes on the stack.
   1657     VisitForAccumulatorValue(expr->value());
   1658 
   1659     AccumulatorValueContext context(this);
   1660     if (ShouldInlineSmiCase(op)) {
   1661       EmitInlineSmiBinaryOp(expr->binary_operation(),
   1662                             op,
   1663                             expr->target(),
   1664                             expr->value());
   1665     } else {
   1666       EmitBinaryOp(expr->binary_operation(), op);
   1667     }
   1668     // Deoptimization point in case the binary operation may have side effects.
   1669     PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
   1670   } else {
   1671     VisitForAccumulatorValue(expr->value());
   1672   }
   1673 
   1674   SetExpressionPosition(expr);
   1675 
   1676   // Store the value.
   1677   switch (assign_type) {
   1678     case VARIABLE: {
   1679       VariableProxy* proxy = expr->target()->AsVariableProxy();
   1680       EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
   1681                              proxy->hole_check_mode());
   1682       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   1683       context()->Plug(rax);
   1684       break;
   1685     }
   1686     case NAMED_PROPERTY:
   1687       EmitNamedPropertyAssignment(expr);
   1688       break;
   1689     case NAMED_SUPER_PROPERTY:
   1690       EmitNamedSuperPropertyStore(property);
   1691       context()->Plug(rax);
   1692       break;
   1693     case KEYED_SUPER_PROPERTY:
   1694       EmitKeyedSuperPropertyStore(property);
   1695       context()->Plug(rax);
   1696       break;
   1697     case KEYED_PROPERTY:
   1698       EmitKeyedPropertyAssignment(expr);
   1699       break;
   1700   }
   1701 }
   1702 
   1703 
   1704 void FullCodeGenerator::VisitYield(Yield* expr) {
   1705   Comment cmnt(masm_, "[ Yield");
   1706   SetExpressionPosition(expr);
   1707 
   1708   // Evaluate yielded value first; the initial iterator definition depends on
   1709   // this.  It stays on the stack while we update the iterator.
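          // For instance, for 'yield x + 1' the value of x + 1 is computed and
          // pushed before the generator state is saved for suspension.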
   1710   VisitForStackValue(expr->expression());
   1711 
   1712   Label suspend, continuation, post_runtime, resume, exception;
   1713 
   1714   __ jmp(&suspend);
   1715   __ bind(&continuation);
   1716   // When we arrive here, rax holds the generator object.
   1717   __ RecordGeneratorContinuation();
   1718   __ movp(rbx, FieldOperand(rax, JSGeneratorObject::kResumeModeOffset));
   1719   __ movp(rax, FieldOperand(rax, JSGeneratorObject::kInputOrDebugPosOffset));
   1720   STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
   1721   STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
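          // One compare classifies all three resume modes: below kReturn is
          // kNext (resume), above is kThrow (exception), and equal falls
          // through to the return path.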
   1722   __ SmiCompare(rbx, Smi::FromInt(JSGeneratorObject::kReturn));
   1723   __ j(less, &resume);
   1724   __ Push(result_register());
   1725   __ j(greater, &exception);
   1726   EmitCreateIteratorResult(true);
   1727   EmitUnwindAndReturn();
   1728 
   1729   __ bind(&exception);
   1730   __ CallRuntime(expr->rethrow_on_exception() ? Runtime::kReThrow
   1731                                               : Runtime::kThrow);
   1732 
   1733   __ bind(&suspend);
   1734   OperandStackDepthIncrement(1);  // Not popped on this path.
   1735   VisitForAccumulatorValue(expr->generator_object());
   1736   DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
   1737   __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
   1738           Smi::FromInt(continuation.pos()));
   1739   __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
   1740   __ movp(rcx, rsi);
   1741   __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
   1742                       kDontSaveFPRegs);
   1743   __ leap(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
   1744   __ cmpp(rsp, rbx);
   1745   __ j(equal, &post_runtime);
   1746   __ Push(rax);  // generator object
   1747   __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
   1748   RestoreContext();
   1749   __ bind(&post_runtime);
   1750 
   1751   PopOperand(result_register());
   1752   EmitReturnSequence();
   1753 
   1754   __ bind(&resume);
   1755   context()->Plug(result_register());
   1756 }
   1757 
   1758 void FullCodeGenerator::PushOperand(MemOperand operand) {
   1759   OperandStackDepthIncrement(1);
   1760   __ Push(operand);
   1761 }
   1762 
   1763 void FullCodeGenerator::EmitOperandStackDepthCheck() {
   1764   if (FLAG_debug_code) {
   1765     int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
   1766                         operand_stack_depth_ * kPointerSize;
   1767     __ movp(rax, rbp);
   1768     __ subp(rax, rsp);
   1769     __ cmpp(rax, Immediate(expected_diff));
   1770     __ Assert(equal, kUnexpectedStackDepth);
   1771   }
   1772 }
   1773 
   1774 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
   1775   Label allocate, done_allocate;
   1776 
   1777   __ Allocate(JSIteratorResult::kSize, rax, rcx, rdx, &allocate,
   1778               NO_ALLOCATION_FLAGS);
   1779   __ jmp(&done_allocate, Label::kNear);
   1780 
   1781   __ bind(&allocate);
   1782   __ Push(Smi::FromInt(JSIteratorResult::kSize));
   1783   __ CallRuntime(Runtime::kAllocateInNewSpace);
   1784 
   1785   __ bind(&done_allocate);
   1786   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, rbx);
   1787   __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
   1788   __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
   1789   __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
   1790   __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
   1791   __ Pop(FieldOperand(rax, JSIteratorResult::kValueOffset));
   1792   __ LoadRoot(FieldOperand(rax, JSIteratorResult::kDoneOffset),
   1793               done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
   1794   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
   1795   OperandStackDepthDecrement(1);
   1796 }
   1797 
   1798 
   1799 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
   1800                                               Token::Value op,
   1801                                               Expression* left,
   1802                                               Expression* right) {
   1803   // Do combined smi check of the operands. Left operand is on the
   1804   // stack (popped into rdx). Right operand is in rax but moved into
   1805   // rcx to make the shifts easier.
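          // ORing the operands into rax lets a single tag-bit test below prove
          // that both operands are smis at once, since kSmiTag is 0.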
   1806   Label done, stub_call, smi_case;
   1807   PopOperand(rdx);
   1808   __ movp(rcx, rax);
   1809   __ orp(rax, rdx);
   1810   JumpPatchSite patch_site(masm_);
   1811   patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
   1812 
   1813   __ bind(&stub_call);
   1814   __ movp(rax, rcx);
   1815   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
   1816   CallIC(code, expr->BinaryOperationFeedbackId());
   1817   patch_site.EmitPatchInfo();
   1818   __ jmp(&done, Label::kNear);
   1819 
   1820   __ bind(&smi_case);
   1821   switch (op) {
   1822     case Token::SAR:
   1823       __ SmiShiftArithmeticRight(rax, rdx, rcx);
   1824       break;
   1825     case Token::SHL:
   1826       __ SmiShiftLeft(rax, rdx, rcx, &stub_call);
   1827       break;
   1828     case Token::SHR:
   1829       __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
   1830       break;
   1831     case Token::ADD:
   1832       __ SmiAdd(rax, rdx, rcx, &stub_call);
   1833       break;
   1834     case Token::SUB:
   1835       __ SmiSub(rax, rdx, rcx, &stub_call);
   1836       break;
   1837     case Token::MUL:
   1838       __ SmiMul(rax, rdx, rcx, &stub_call);
   1839       break;
   1840     case Token::BIT_OR:
   1841       __ SmiOr(rax, rdx, rcx);
   1842       break;
   1843     case Token::BIT_AND:
   1844       __ SmiAnd(rax, rdx, rcx);
   1845       break;
   1846     case Token::BIT_XOR:
   1847       __ SmiXor(rax, rdx, rcx);
   1848       break;
   1849     default:
   1850       UNREACHABLE();
   1851       break;
   1852   }
   1853 
   1854   __ bind(&done);
   1855   context()->Plug(rax);
   1856 }
   1857 
   1858 
   1859 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
   1860   for (int i = 0; i < lit->properties()->length(); i++) {
   1861     ClassLiteral::Property* property = lit->properties()->at(i);
   1862     Expression* value = property->value();
   1863 
   1864     if (property->is_static()) {
   1865       PushOperand(Operand(rsp, kPointerSize));  // constructor
   1866     } else {
   1867       PushOperand(Operand(rsp, 0));  // prototype
   1868     }
   1869     EmitPropertyKey(property, lit->GetIdForProperty(i));
   1870 
   1871     // The static "prototype" property is read-only, and the parser already
   1872     // rejects it when the property name is not computed. A computed name on
   1873     // a static property is therefore the only case that needs a runtime
   1874     // check, so we special-case it here instead of checking every property.
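            // For example, 'class C { static [k]() {} }' must throw a TypeError
            // at runtime if k evaluates to the string "prototype".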
   1875     if (property->is_static() && property->is_computed_name()) {
   1876       __ CallRuntime(Runtime::kThrowIfStaticPrototype);
   1877       __ Push(rax);
   1878     }
   1879 
   1880     VisitForStackValue(value);
   1881     if (NeedsHomeObject(value)) {
   1882       EmitSetHomeObject(value, 2, property->GetSlot());
   1883     }
   1884 
   1885     switch (property->kind()) {
   1886       case ClassLiteral::Property::METHOD:
   1887         PushOperand(Smi::FromInt(DONT_ENUM));
   1888         PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
   1889         CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
   1890         break;
   1891 
   1892       case ClassLiteral::Property::GETTER:
   1893         PushOperand(Smi::FromInt(DONT_ENUM));
   1894         CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
   1895         break;
   1896 
   1897       case ClassLiteral::Property::SETTER:
   1898         PushOperand(Smi::FromInt(DONT_ENUM));
   1899         CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
   1900         break;
   1901 
   1902       case ClassLiteral::Property::FIELD:
   1903       default:
   1904         UNREACHABLE();
   1905     }
   1906   }
   1907 }
   1908 
   1909 
   1910 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
   1911   PopOperand(rdx);
   1912   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
   1913   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
   1914   CallIC(code, expr->BinaryOperationFeedbackId());
   1915   patch_site.EmitPatchInfo();
   1916   context()->Plug(rax);
   1917 }
   1918 
   1919 
   1920 void FullCodeGenerator::EmitAssignment(Expression* expr,
   1921                                        FeedbackVectorSlot slot) {
   1922   DCHECK(expr->IsValidReferenceExpressionOrThis());
   1923 
   1924   Property* prop = expr->AsProperty();
   1925   LhsKind assign_type = Property::GetAssignType(prop);
   1926 
   1927   switch (assign_type) {
   1928     case VARIABLE: {
   1929       VariableProxy* proxy = expr->AsVariableProxy();
   1930       EffectContext context(this);
   1931       EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
   1932                              proxy->hole_check_mode());
   1933       break;
   1934     }
   1935     case NAMED_PROPERTY: {
   1936       PushOperand(rax);  // Preserve value.
   1937       VisitForAccumulatorValue(prop->obj());
   1938       __ Move(StoreDescriptor::ReceiverRegister(), rax);
   1939       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
   1940       CallStoreIC(slot, prop->key()->AsLiteral()->value());
   1941       break;
   1942     }
   1943     case NAMED_SUPER_PROPERTY: {
   1944       PushOperand(rax);
   1945       VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
   1946       VisitForAccumulatorValue(
   1947           prop->obj()->AsSuperPropertyReference()->home_object());
   1948       // stack: value, this; rax: home_object
   1949       Register scratch = rcx;
   1950       Register scratch2 = rdx;
   1951       __ Move(scratch, result_register());               // home_object
   1952       __ movp(rax, MemOperand(rsp, kPointerSize));       // value
   1953       __ movp(scratch2, MemOperand(rsp, 0));             // this
   1954       __ movp(MemOperand(rsp, kPointerSize), scratch2);  // this
   1955       __ movp(MemOperand(rsp, 0), scratch);              // home_object
   1956       // stack: this, home_object; rax: value
   1957       EmitNamedSuperPropertyStore(prop);
   1958       break;
   1959     }
   1960     case KEYED_SUPER_PROPERTY: {
   1961       PushOperand(rax);
   1962       VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
   1963       VisitForStackValue(
   1964           prop->obj()->AsSuperPropertyReference()->home_object());
   1965       VisitForAccumulatorValue(prop->key());
   1966       Register scratch = rcx;
   1967       Register scratch2 = rdx;
   1968       __ movp(scratch2, MemOperand(rsp, 2 * kPointerSize));  // value
   1969       // stack: value, this, home_object; rax: key, rdx: value
   1970       __ movp(scratch, MemOperand(rsp, kPointerSize));  // this
   1971       __ movp(MemOperand(rsp, 2 * kPointerSize), scratch);
   1972       __ movp(scratch, MemOperand(rsp, 0));  // home_object
   1973       __ movp(MemOperand(rsp, kPointerSize), scratch);
   1974       __ movp(MemOperand(rsp, 0), rax);
   1975       __ Move(rax, scratch2);
   1976       // stack: this, home_object, key; rax: value.
   1977       EmitKeyedSuperPropertyStore(prop);
   1978       break;
   1979     }
   1980     case KEYED_PROPERTY: {
   1981       PushOperand(rax);  // Preserve value.
   1982       VisitForStackValue(prop->obj());
   1983       VisitForAccumulatorValue(prop->key());
   1984       __ Move(StoreDescriptor::NameRegister(), rax);
   1985       PopOperand(StoreDescriptor::ReceiverRegister());
   1986       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
   1987       CallKeyedStoreIC(slot);
   1988       break;
   1989     }
   1990   }
   1991   context()->Plug(rax);
   1992 }
   1993 
   1994 
   1995 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
   1996     Variable* var, MemOperand location) {
   1997   __ movp(location, rax);
   1998   if (var->IsContextSlot()) {
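            // Storing a pointer into a heap-allocated context needs a write
            // barrier for the GC; rdx carries a scratch copy of the value so
            // rax survives as the expression result.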
   1999     __ movp(rdx, rax);
   2000     __ RecordWriteContextSlot(
   2001         rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
   2002   }
   2003 }
   2004 
   2005 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
   2006                                                FeedbackVectorSlot slot,
   2007                                                HoleCheckMode hole_check_mode) {
   2008   if (var->IsUnallocated()) {
   2009     // Global var, const, or let.
   2010     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
   2011     CallStoreIC(slot, var->name());
   2012 
   2013   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
   2014     DCHECK(!var->IsLookupSlot());
   2015     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
   2016     MemOperand location = VarOperand(var, rcx);
   2017     // Perform an initialization check for lexically declared variables.
   2018     if (hole_check_mode == HoleCheckMode::kRequired) {
   2019       Label assign;
   2020       __ movp(rdx, location);
   2021       __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
   2022       __ j(not_equal, &assign, Label::kNear);
   2023       __ Push(var->name());
   2024       __ CallRuntime(Runtime::kThrowReferenceError);
   2025       __ bind(&assign);
   2026     }
   2027     if (var->mode() != CONST) {
   2028       EmitStoreToStackLocalOrContextSlot(var, location);
   2029     } else if (var->throw_on_const_assignment(language_mode())) {
   2030       __ CallRuntime(Runtime::kThrowConstAssignError);
   2031     }
   2032 
   2033   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
   2034     // Initializing assignment to const {this} needs a write barrier.
   2035     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
   2036     Label uninitialized_this;
   2037     MemOperand location = VarOperand(var, rcx);
   2038     __ movp(rdx, location);
   2039     __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
   2040     __ j(equal, &uninitialized_this);
   2041     __ Push(var->name());
   2042     __ CallRuntime(Runtime::kThrowReferenceError);
   2043     __ bind(&uninitialized_this);
   2044     EmitStoreToStackLocalOrContextSlot(var, location);
   2045 
   2046   } else {
   2047     DCHECK(var->mode() != CONST || op == Token::INIT);
   2048     if (var->IsLookupSlot()) {
   2049       // Assignment to var.
   2050       __ Push(var->name());
   2051       __ Push(rax);
   2052       __ CallRuntime(is_strict(language_mode())
   2053                          ? Runtime::kStoreLookupSlot_Strict
   2054                          : Runtime::kStoreLookupSlot_Sloppy);
   2055     } else {
   2056       // Assignment to var or initializing assignment to let/const in harmony
   2057       // mode.
   2058       DCHECK(var->IsStackAllocated() || var->IsContextSlot());
   2059       MemOperand location = VarOperand(var, rcx);
   2060       if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
   2061         // Check for an uninitialized let binding.
   2062         __ movp(rdx, location);
   2063         __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
   2064         __ Check(equal, kLetBindingReInitialization);
   2065       }
   2066       EmitStoreToStackLocalOrContextSlot(var, location);
   2067     }
   2068   }
   2069 }
   2070 
   2071 
   2072 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
   2073   // Assignment to a property, using a named store IC.
   2074   Property* prop = expr->target()->AsProperty();
   2075   DCHECK(prop != NULL);
   2076   DCHECK(prop->key()->IsLiteral());
   2077 
   2078   PopOperand(StoreDescriptor::ReceiverRegister());
   2079   CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
   2080 
   2081   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   2082   context()->Plug(rax);
   2083 }
   2084 
   2085 
   2086 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
   2087   // Assignment to named property of super.
   2088   // rax : value
   2089   // stack : receiver ('this'), home_object
   2090   DCHECK(prop != NULL);
   2091   Literal* key = prop->key()->AsLiteral();
   2092   DCHECK(key != NULL);
   2093 
   2094   PushOperand(key->value());
   2095   PushOperand(rax);
   2096   CallRuntimeWithOperands(is_strict(language_mode())
   2097                               ? Runtime::kStoreToSuper_Strict
   2098                               : Runtime::kStoreToSuper_Sloppy);
   2099 }
   2100 
   2101 
   2102 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
   2103   // Assignment to keyed property of super.
   2104   // rax : value
   2105   // stack : receiver ('this'), home_object, key
   2106   DCHECK(prop != NULL);
   2107 
   2108   PushOperand(rax);
   2109   CallRuntimeWithOperands(is_strict(language_mode())
   2110                               ? Runtime::kStoreKeyedToSuper_Strict
   2111                               : Runtime::kStoreKeyedToSuper_Sloppy);
   2112 }
   2113 
   2114 
   2115 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   2116   // Assignment to a property, using a keyed store IC.
   2117   PopOperand(StoreDescriptor::NameRegister());  // Key.
   2118   PopOperand(StoreDescriptor::ReceiverRegister());
   2119   DCHECK(StoreDescriptor::ValueRegister().is(rax));
   2120   CallKeyedStoreIC(expr->AssignmentSlot());
   2121 
   2122   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   2123   context()->Plug(rax);
   2124 }
   2125 
   2126 // Common code for calls using the IC.
   2127 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
   2128   Expression* callee = expr->expression();
   2129 
   2130   // Get the target function.
   2131   ConvertReceiverMode convert_mode;
   2132   if (callee->IsVariableProxy()) {
   2133     { StackValueContext context(this);
   2134       EmitVariableLoad(callee->AsVariableProxy());
   2135       PrepareForBailout(callee, BailoutState::NO_REGISTERS);
   2136     }
   2137     // Push undefined as receiver. This is patched in the Call builtin if it
   2138     // is a sloppy mode method.
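            // For instance, a plain call 'f()' in sloppy mode ends up with the
            // global proxy as its receiver.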
   2139     PushOperand(isolate()->factory()->undefined_value());
   2140     convert_mode = ConvertReceiverMode::kNullOrUndefined;
   2141   } else {
   2142     // Load the function from the receiver.
   2143     DCHECK(callee->IsProperty());
   2144     DCHECK(!callee->AsProperty()->IsSuperAccess());
   2145     __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
   2146     EmitNamedPropertyLoad(callee->AsProperty());
   2147     PrepareForBailoutForId(callee->AsProperty()->LoadId(),
   2148                            BailoutState::TOS_REGISTER);
   2149     // Push the target function under the receiver.
   2150     PushOperand(Operand(rsp, 0));
   2151     __ movp(Operand(rsp, kPointerSize), rax);
   2152     convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
   2153   }
   2154 
   2155   EmitCall(expr, convert_mode);
   2156 }
   2157 
   2158 
   2159 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
   2160   Expression* callee = expr->expression();
   2161   DCHECK(callee->IsProperty());
   2162   Property* prop = callee->AsProperty();
   2163   DCHECK(prop->IsSuperAccess());
   2164   SetExpressionPosition(prop);
   2165 
   2166   Literal* key = prop->key()->AsLiteral();
   2167   DCHECK(!key->value()->IsSmi());
   2168   // Load the function from the receiver.
   2169   SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
   2170   VisitForStackValue(super_ref->home_object());
   2171   VisitForAccumulatorValue(super_ref->this_var());
   2172   PushOperand(rax);
   2173   PushOperand(rax);
   2174   PushOperand(Operand(rsp, kPointerSize * 2));
   2175   PushOperand(key->value());
   2176 
   2177   // Stack here:
   2178   //  - home_object
   2179   //  - this (receiver)
   2180   //  - this (receiver) <-- LoadFromSuper will pop here and below.
   2181   //  - home_object
   2182   //  - key
   2183   CallRuntimeWithOperands(Runtime::kLoadFromSuper);
   2184   PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
   2185 
   2186   // Replace home_object with target function.
   2187   __ movp(Operand(rsp, kPointerSize), rax);
   2188 
   2189   // Stack here:
   2190   // - target function
   2191   // - this (receiver)
   2192   EmitCall(expr);
   2193 }
   2194 
   2195 
   2196 // Common code for calls using the IC.
   2197 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
   2198                                                 Expression* key) {
   2199   // Load the key.
   2200   VisitForAccumulatorValue(key);
   2201 
   2202   Expression* callee = expr->expression();
   2203 
   2204   // Load the function from the receiver.
   2205   DCHECK(callee->IsProperty());
   2206   __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
   2207   __ Move(LoadDescriptor::NameRegister(), rax);
   2208   EmitKeyedPropertyLoad(callee->AsProperty());
   2209   PrepareForBailoutForId(callee->AsProperty()->LoadId(),
   2210                          BailoutState::TOS_REGISTER);
   2211 
   2212   // Push the target function under the receiver.
   2213   PushOperand(Operand(rsp, 0));
   2214   __ movp(Operand(rsp, kPointerSize), rax);
   2215 
   2216   EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
   2217 }
   2218 
   2219 
   2220 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
   2221   Expression* callee = expr->expression();
   2222   DCHECK(callee->IsProperty());
   2223   Property* prop = callee->AsProperty();
   2224   DCHECK(prop->IsSuperAccess());
   2225 
   2226   SetExpressionPosition(prop);
   2227   // Load the function from the receiver.
   2228   SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
   2229   VisitForStackValue(super_ref->home_object());
   2230   VisitForAccumulatorValue(super_ref->this_var());
   2231   PushOperand(rax);
   2232   PushOperand(rax);
   2233   PushOperand(Operand(rsp, kPointerSize * 2));
   2234   VisitForStackValue(prop->key());
   2235 
   2236   // Stack here:
   2237   //  - home_object
   2238   //  - this (receiver)
   2239   //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
   2240   //  - home_object
   2241   //  - key
   2242   CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
   2243   PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
   2244 
   2245   // Replace home_object with target function.
   2246   __ movp(Operand(rsp, kPointerSize), rax);
   2247 
   2248   // Stack here:
   2249   // - target function
   2250   // - this (receiver)
   2251   EmitCall(expr);
   2252 }
   2253 
   2254 
   2255 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
   2256   // Load the arguments.
   2257   ZoneList<Expression*>* args = expr->arguments();
   2258   int arg_count = args->length();
   2259   for (int i = 0; i < arg_count; i++) {
   2260     VisitForStackValue(args->at(i));
   2261   }
   2262 
   2263   PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
   2264   SetCallPosition(expr, expr->tail_call_mode());
   2265   if (expr->tail_call_mode() == TailCallMode::kAllow) {
   2266     if (FLAG_trace) {
   2267       __ CallRuntime(Runtime::kTraceTailCall);
   2268     }
   2269     // Update profiling counters before the tail call since we will
   2270     // not return to this function.
   2271     EmitProfilingCounterHandlingForReturnSequence(true);
   2272   }
   2273   Handle<Code> code =
   2274       CodeFactory::CallIC(isolate(), mode, expr->tail_call_mode()).code();
   2275   __ Move(rdx, SmiFromSlot(expr->CallFeedbackICSlot()));
   2276   __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
   2277   __ Set(rax, arg_count);
   2278   CallIC(code);
   2279   OperandStackDepthDecrement(arg_count + 1);
   2280 
   2281   RecordJSReturnSite(expr);
   2282   RestoreContext();
   2283   // Discard the function left on TOS.
   2284   context()->DropAndPlug(1, rax);
   2285 }
   2286 
   2287 void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
   2288   int arg_count = expr->arguments()->length();
   2289   // Push copy of the first argument or undefined if it doesn't exist.
   2290   if (arg_count > 0) {
   2291     __ Push(Operand(rsp, arg_count * kPointerSize));
   2292   } else {
   2293     __ PushRoot(Heap::kUndefinedValueRootIndex);
   2294   }
   2295 
   2296   // Push the enclosing function.
   2297   __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   2298 
   2299   // Push the language mode.
   2300   __ Push(Smi::FromInt(language_mode()));
   2301 
   2302   // Push the start position of the scope the calls resides in.
   2303   // Push the start position of the scope the call resides in.
   2304 
   2305   // Push the source position of the eval call.
   2306   __ Push(Smi::FromInt(expr->position()));
   2307 
   2308   // Do the runtime call.
   2309   __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
   2310 }
   2311 
   2312 
   2313 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
   2314 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
   2315   VariableProxy* callee = expr->expression()->AsVariableProxy();
   2316   if (callee->var()->IsLookupSlot()) {
   2317     Label slow, done;
   2318     SetExpressionPosition(callee);
   2319     // Generate code for loading from variables potentially shadowed by
   2320     // eval-introduced variables.
   2321     EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
   2322     __ bind(&slow);
   2323     // Call the runtime to find the function to call (returned in rax) and
   2324     // the object holding it (returned in rdx).
   2325     __ Push(callee->name());
   2326     __ CallRuntime(Runtime::kLoadLookupSlotForCall);
   2327     PushOperand(rax);  // Function.
   2328     PushOperand(rdx);  // Receiver.
   2329     PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
   2330 
   2331     // If fast case code has been generated, emit code to push the function
   2332     // and receiver and have the slow path jump around this code.
   2333     if (done.is_linked()) {
   2334       Label call;
   2335       __ jmp(&call, Label::kNear);
   2336       __ bind(&done);
   2337       // Push function.
   2338       __ Push(rax);
   2339       // Pass undefined as the receiver, which is the WithBaseObject of a
   2340       // non-object environment record.  If the callee is sloppy, it will patch
   2341       // it up to be the global receiver.
   2342       __ PushRoot(Heap::kUndefinedValueRootIndex);
   2343       __ bind(&call);
   2344     }
   2345   } else {
   2346     VisitForStackValue(callee);
   2347     // refEnv.WithBaseObject()
   2348     OperandStackDepthIncrement(1);
   2349     __ PushRoot(Heap::kUndefinedValueRootIndex);
   2350   }
   2351 }
   2352 
   2353 
   2354 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
   2355   // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
   2356   // to resolve the function we need to call.  Then we call the resolved
   2357   // function using the given arguments.
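          // For example, for eval("x") the resolver decides whether this is a
          // direct eval, which uses the caller's scope, or a call to some other
          // function bound to the name 'eval'.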
   2358   ZoneList<Expression*>* args = expr->arguments();
   2359   int arg_count = args->length();
   2360   PushCalleeAndWithBaseObject(expr);
   2361 
   2362   // Push the arguments.
   2363   for (int i = 0; i < arg_count; i++) {
   2364     VisitForStackValue(args->at(i));
   2365   }
   2366 
   2367   // Push a copy of the function (found below the arguments) and resolve
   2368   // eval.
   2369   __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
   2370   EmitResolvePossiblyDirectEval(expr);
   2371 
   2372   // Touch up the callee.
   2373   __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
   2374 
   2375   PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
   2376 
   2377   SetCallPosition(expr);
   2378   Handle<Code> code = CodeFactory::CallIC(isolate(), ConvertReceiverMode::kAny,
   2379                                           expr->tail_call_mode())
   2380                           .code();
   2381   __ Move(rdx, SmiFromSlot(expr->CallFeedbackICSlot()));
   2382   __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
   2383   __ Set(rax, arg_count);
   2384   __ call(code, RelocInfo::CODE_TARGET);
   2385   OperandStackDepthDecrement(arg_count + 1);
   2386   RecordJSReturnSite(expr);
   2387   RestoreContext();
   2388   context()->DropAndPlug(1, rax);
   2389 }
   2390 
   2391 
   2392 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   2393   Comment cmnt(masm_, "[ CallNew");
   2394   // According to ECMA-262, section 11.2.2, page 44, the function
   2395   // expression in new calls must be evaluated before the
   2396   // arguments.
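          // For example, in 'new C(f())', C is evaluated before f is called.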
   2397 
   2398   // Push the constructor on the stack. It is reloaded into rdi below;
   2399   // using 'new' on a non-constructor throws a TypeError at runtime.
   2401   DCHECK(!expr->expression()->IsSuperPropertyReference());
   2402   VisitForStackValue(expr->expression());
   2403 
   2404   // Push the arguments ("left-to-right") on the stack.
   2405   ZoneList<Expression*>* args = expr->arguments();
   2406   int arg_count = args->length();
   2407   for (int i = 0; i < arg_count; i++) {
   2408     VisitForStackValue(args->at(i));
   2409   }
   2410 
   2411   // Call the construct call builtin that handles allocation and
   2412   // constructor invocation.
   2413   SetConstructCallPosition(expr);
   2414 
   2415   // Load function and argument count into rdi and rax.
   2416   __ Set(rax, arg_count);
   2417   __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
   2418 
   2419   // Record call targets in unoptimized code, but not in the snapshot.
   2420   __ EmitLoadTypeFeedbackVector(rbx);
   2421   __ Move(rdx, SmiFromSlot(expr->CallNewFeedbackSlot()));
   2422 
   2423   CallConstructStub stub(isolate());
   2424   CallIC(stub.GetCode());
   2425   OperandStackDepthDecrement(arg_count + 1);
   2426   PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
   2427   RestoreContext();
   2428   context()->Plug(rax);
   2429 }
   2430 
   2431 
   2432 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
   2433   SuperCallReference* super_call_ref =
   2434       expr->expression()->AsSuperCallReference();
   2435   DCHECK_NOT_NULL(super_call_ref);
   2436 
   2437   // Push the super constructor target on the stack (may be null,
   2438   // but the Construct builtin can deal with that properly).
   2439   VisitForAccumulatorValue(super_call_ref->this_function_var());
   2440   __ AssertFunction(result_register());
   2441   __ movp(result_register(),
   2442           FieldOperand(result_register(), HeapObject::kMapOffset));
   2443   PushOperand(FieldOperand(result_register(), Map::kPrototypeOffset));
   2444 
   2445   // Push the arguments ("left-to-right") on the stack.
   2446   ZoneList<Expression*>* args = expr->arguments();
   2447   int arg_count = args->length();
   2448   for (int i = 0; i < arg_count; i++) {
   2449     VisitForStackValue(args->at(i));
   2450   }
   2451 
   2452   // Call the construct call builtin that handles allocation and
   2453   // constructor invocation.
   2454   SetConstructCallPosition(expr);
   2455 
   2456   // Load new target into rdx.
   2457   VisitForAccumulatorValue(super_call_ref->new_target_var());
   2458   __ movp(rdx, result_register());
   2459 
   2460   // Load function and argument count into rdi and rax.
   2461   __ Set(rax, arg_count);
   2462   __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
   2463 
   2464   __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
   2465   OperandStackDepthDecrement(arg_count + 1);
   2466 
   2467   RecordJSReturnSite(expr);
   2468   RestoreContext();
   2469   context()->Plug(rax);
   2470 }
   2471 
   2472 
   2473 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
   2474   ZoneList<Expression*>* args = expr->arguments();
   2475   DCHECK(args->length() == 1);
   2476 
   2477   VisitForAccumulatorValue(args->at(0));
   2478 
   2479   Label materialize_true, materialize_false;
   2480   Label* if_true = NULL;
   2481   Label* if_false = NULL;
   2482   Label* fall_through = NULL;
   2483   context()->PrepareTest(&materialize_true, &materialize_false,
   2484                          &if_true, &if_false, &fall_through);
   2485 
   2486   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2487   __ JumpIfSmi(rax, if_true);
   2488   __ jmp(if_false);
   2489 
   2490   context()->Plug(if_true, if_false);
   2491 }
   2492 
   2493 
   2494 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
   2495   ZoneList<Expression*>* args = expr->arguments();
   2496   DCHECK(args->length() == 1);
   2497 
   2498   VisitForAccumulatorValue(args->at(0));
   2499 
   2500   Label materialize_true, materialize_false;
   2501   Label* if_true = NULL;
   2502   Label* if_false = NULL;
   2503   Label* fall_through = NULL;
   2504   context()->PrepareTest(&materialize_true, &materialize_false,
   2505                          &if_true, &if_false, &fall_through);
   2506 
   2507   __ JumpIfSmi(rax, if_false);
   2508   __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rbx);
   2509   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2510   Split(above_equal, if_true, if_false, fall_through);
   2511 
   2512   context()->Plug(if_true, if_false);
   2513 }
   2514 
   2515 
   2516 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
   2517   ZoneList<Expression*>* args = expr->arguments();
   2518   DCHECK(args->length() == 1);
   2519 
   2520   VisitForAccumulatorValue(args->at(0));
   2521 
   2522   Label materialize_true, materialize_false;
   2523   Label* if_true = NULL;
   2524   Label* if_false = NULL;
   2525   Label* fall_through = NULL;
   2526   context()->PrepareTest(&materialize_true, &materialize_false,
   2527                          &if_true, &if_false, &fall_through);
   2528 
   2529   __ JumpIfSmi(rax, if_false);
   2530   __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
   2531   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2532   Split(equal, if_true, if_false, fall_through);
   2533 
   2534   context()->Plug(if_true, if_false);
   2535 }
   2536 
   2537 
   2538 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
   2539   ZoneList<Expression*>* args = expr->arguments();
   2540   DCHECK(args->length() == 1);
   2541 
   2542   VisitForAccumulatorValue(args->at(0));
   2543 
   2544   Label materialize_true, materialize_false;
   2545   Label* if_true = NULL;
   2546   Label* if_false = NULL;
   2547   Label* fall_through = NULL;
   2548   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
   2549                          &if_false, &fall_through);
   2550 
   2551   __ JumpIfSmi(rax, if_false);
   2552   __ CmpObjectType(rax, JS_TYPED_ARRAY_TYPE, rbx);
   2553   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2554   Split(equal, if_true, if_false, fall_through);
   2555 
   2556   context()->Plug(if_true, if_false);
   2557 }
   2558 
   2559 
   2560 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
   2561   ZoneList<Expression*>* args = expr->arguments();
   2562   DCHECK(args->length() == 1);
   2563 
   2564   VisitForAccumulatorValue(args->at(0));
   2565 
   2566   Label materialize_true, materialize_false;
   2567   Label* if_true = NULL;
   2568   Label* if_false = NULL;
   2569   Label* fall_through = NULL;
   2570   context()->PrepareTest(&materialize_true, &materialize_false,
   2571                          &if_true, &if_false, &fall_through);
   2572 
   2573   __ JumpIfSmi(rax, if_false);
   2574   __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
   2575   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2576   Split(equal, if_true, if_false, fall_through);
   2577 
   2578   context()->Plug(if_true, if_false);
   2579 }
   2580 
   2581 
   2582 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
   2583   ZoneList<Expression*>* args = expr->arguments();
   2584   DCHECK(args->length() == 1);
   2585 
   2586   VisitForAccumulatorValue(args->at(0));
   2587 
   2588   Label materialize_true, materialize_false;
   2589   Label* if_true = NULL;
   2590   Label* if_false = NULL;
   2591   Label* fall_through = NULL;
   2592   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
   2593                          &if_false, &fall_through);
   2594 
   2596   __ JumpIfSmi(rax, if_false);
   2597   __ CmpObjectType(rax, JS_PROXY_TYPE, rbx);
   2598   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2599   Split(equal, if_true, if_false, fall_through);
   2600 
   2601   context()->Plug(if_true, if_false);
   2602 }


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(rax, &null, Label::kNear);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rax);
  __ j(below, &null, Label::kNear);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ CmpInstanceType(rax, FIRST_FUNCTION_TYPE);
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ j(above_equal, &function, Label::kNear);

  // Check if the constructor in the map is a JS function.
  __ GetMapConstructor(rax, rax, rbx);
  __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_function_constructor, Label::kNear);

  // rax now contains the constructor function. Grab the
  // instance class name from there.
  __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done, Label::kNear);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(rax, Heap::kNullValueRootIndex);
  __ jmp(&done, Label::kNear);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(rax, Heap::kFunction_stringRootIndex);
  __ jmp(&done, Label::kNear);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(rax, Heap::kObject_stringRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(rax);
}
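
// Illustrative results of the classification above (a sketch; the exact
// string comes from each constructor's SharedFunctionInfo):
//   %_ClassOf(function(){}) -> "Function"  (function-type fast case)
//   %_ClassOf([])           -> "Array"     (instance class name of Array)
//   %_ClassOf(1)            -> null        (Smi, not a JSReceiver)
// and "Object" whenever the map's constructor is not a JSFunction.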


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register result = rdx;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
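
// Roughly, the JS-level behavior the paths above implement (illustrative):
//   "abc".charCodeAt(1)  -> 98   (fast path)
//   "abc".charCodeAt(42) -> NaN  (index_out_of_range)
// A non-smi index lands in need_conversion, and the slow path converts it
// before retrying.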


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to rdi.
  int const argc = args->length() - 2;
  __ movp(rdi, Operand(rsp, (argc + 1) * kPointerSize));
  // Call the target.
  __ Set(rax, argc);
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, rax);
}
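
// Sketch of the operand stack just before the Call builtin above, with
// argc = args->length() - 2 (offsets in kPointerSize units):
//   rsp[0]       : argument argc-1 (TOS)
//   ...
//   rsp[argc-1]  : argument 0
//   rsp[argc]    : receiver
//   rsp[argc+1]  : target (copied into rdi)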

void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(rax);
  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ movp(rax, FieldOperand(rax, Map::kPrototypeOffset));
  context()->Plug(rax);
}
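
// In JS terms (illustrative): for `class B extends A {}`, the constructor B
// has A as its [[Prototype]], so loading map->prototype from the active
// function yields A, the constructor that `super(...)` must invoke.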

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ Move(kScratchRegister, debug_is_active);
  __ movzxbp(rax, Operand(kScratchRegister, 0));
  __ Integer32ToSmi(rax, rax);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, rax, rcx, rdx, &runtime,
              NO_ALLOCATION_FLAGS);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, rbx);
  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
  __ Pop(FieldOperand(rax, JSIteratorResult::kDoneOffset));
  __ Pop(FieldOperand(rax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(rax);
}
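
// Fast-path result layout (five pointer-size slots, matching the
// STATIC_ASSERT above; a sketch):
//   [ map | properties | elements | value | done ]
// `done` was pushed last, so it is popped first, then `value`.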


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), rax);
  PushOperand(rax);

  // Push undefined as receiver.
  OperandStackDepthIncrement(1);
  __ PushRoot(Heap::kUndefinedValueRootIndex);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  __ Set(rax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}
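
// Stack sketch at the Call above (offsets in kPointerSize units):
//   rsp[0]           : last argument (TOS)
//   ...
//   rsp[arg_count]   : undefined receiver (pushed by the load step)
//   rsp[arg_count+1] : the JS runtime function (loaded into rdi)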


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(rax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->is_this();
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocated()) {
          __ movp(rax, NativeContextOperand());
          __ Push(ContextOperand(rax, Context::EXTENSION_INDEX));
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(rax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(rax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }
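
    // Illustrative JS semantics of the branches above (sloppy mode unless
    // noted):
    //   delete o.x   -> runtime property deletion
    //   delete g     -> unallocated: deleted from the global extension object
    //   delete local -> false (stack/context slots are not deletable)
    //   delete this  -> true (allowed even in strict mode)
    //   delete f()   -> true; f() is still evaluated for its side effects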

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kTrueValueRootIndex);
        } else {
          __ PushRoot(Heap::kTrueValueRootIndex);
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kFalseValueRootIndex);
        } else {
          __ PushRoot(Heap::kFalseValueRootIndex);
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ movp(rbx, rax);
      __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
      context()->Plug(rax);
      break;
    }

    default:
      UNREACHABLE();
  }
}
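
// Example of the label swap in the NOT case above: in a test context,
// `if (!x) A; else B;` simply visits x with the true and false labels
// exchanged, so no negation code is emitted; the control flow itself
// encodes the NOT.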


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(Smi::kZero);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        VisitForStackValue(prop->obj());
        __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        PushOperand(MemOperand(rsp, kPointerSize));
        PushOperand(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        PushOperand(result_register());
        PushOperand(MemOperand(rsp, 2 * kPointerSize));
        PushOperand(MemOperand(rsp, 2 * kPointerSize));
        PushOperand(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        // Leave receiver on stack.
        __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
        // Copy of key, needed for later store.
        __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ Push(rax);
            break;
          case NAMED_PROPERTY:
            __ movp(Operand(rsp, kPointerSize), rax);
            break;
          case NAMED_SUPER_PROPERTY:
            __ movp(Operand(rsp, 2 * kPointerSize), rax);
            break;
          case KEYED_PROPERTY:
            __ movp(Operand(rsp, 2 * kPointerSize), rax);
            break;
          case KEYED_SUPER_PROPERTY:
            __ movp(Operand(rsp, 3 * kPointerSize), rax);
            break;
        }
      }
    }
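
    // Why the offsets above differ (a sketch; deepest slot listed first,
    // offsets from rsp at this point):
    //   VARIABLE:             no reserved slot; the value is pushed
    //   NAMED_PROPERTY:       [reserved][receiver]        -> rsp + 8
    //   KEYED_PROPERTY:       [reserved][receiver][key]   -> rsp + 16
    //   NAMED_SUPER_PROPERTY: [reserved][this][home]      -> rsp + 16
    //   KEYED_SUPER_PROPERTY: [reserved][this][home][key] -> rsp + 24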

    SmiOperationConstraints constraints =
        SmiOperationConstraint::kPreserveSourceRegister |
        SmiOperationConstraint::kBailoutOnNoOverflow;
    if (expr->op() == Token::INC) {
      __ SmiAddConstant(rax, rax, Smi::FromInt(1), constraints, &done,
                        Label::kNear);
    } else {
      __ SmiSubConstant(rax, rax, Smi::FromInt(1), constraints, &done,
                        Label::kNear);
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(rax);
          break;
        case NAMED_PROPERTY:
          __ movp(Operand(rsp, kPointerSize), rax);
          break;
        case NAMED_SUPER_PROPERTY:
          __ movp(Operand(rsp, 2 * kPointerSize), rax);
          break;
        case KEYED_PROPERTY:
          __ movp(Operand(rsp, 2 * kPointerSize), rax);
          break;
        case KEYED_SUPER_PROPERTY:
          __ movp(Operand(rsp, 3 * kPointerSize), rax);
          break;
      }
    }
  }

  SetExpressionPosition(expr);

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ movp(rdx, rax);
  __ Move(rax, Smi::FromInt(1));
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in rax.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                                 proxy->hole_check_mode());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(rax);
        }
        // For all contexts except kEffect: We have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                               proxy->hole_check_mode());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(rax);
      }
      break;
    }
    case NAMED_PROPERTY: {
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallKeyedStoreIC(expr->CountSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
  }
}
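
// Illustrative: for `y = x++` in a value context, the old value of x is
// saved on the operand stack (in the reserved slot for property targets),
// the incremented value is stored back into x, and PlugTOS then hands the
// saved old value to the surrounding expression.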


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(rax, if_true);
    __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
    __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(rax, if_false);
    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(rax, if_false);
    __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(equal, if_true);
    __ CompareRoot(rax, Heap::kFalseValueRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ CompareRoot(rax, Heap::kNullValueRootIndex);
    __ j(equal, if_false);
    __ JumpIfSmi(rax, if_false);
    // Check for undetectable objects => true.
    __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(rax, if_false);
    // Check for callable and not undetectable objects => true.
    __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ movzxbl(rdx, FieldOperand(rdx, Map::kBitFieldOffset));
    __ andb(rdx,
            Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    __ cmpb(rdx, Immediate(1 << Map::kIsCallable));
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(rax, if_false);
    __ CompareRoot(rax, Heap::kNullValueRootIndex);
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rdx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(zero, if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(rax, if_false);                                \
    __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));    \
    __ CompareRoot(rax, Heap::k##Type##MapRootIndex);           \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
    // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
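
// Illustrative cases for the checks above:
//   typeof 1             == "number"     (Smi fast case)
//   typeof null          == "object"     (null hits the object arm)
//   typeof document.all  == "undefined"  (undetectable bit set)
//   typeof function(){}  == "function"   (callable, not undetectable)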


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(rdx);
      __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      PopOperand(rdx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ movp(rcx, rdx);
        __ orp(rcx, rax);
        patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
        __ cmpp(rdx, rax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }
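
      // Note on the fast path above: the smi tag bit is 0, so rdx|rax has a
      // clear low bit only when both operands are smis; one patchable smi
      // check therefore guards the whole inlined comparison.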

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ testp(rax, rax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(rax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(rax, if_false);
    __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rax, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
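
// Illustrative (assuming the undefined and null maps carry the undetectable
// bit, as set up by the heap):
//   x === null -> true only for null itself (EQ_STRICT arm);
//   x == null  -> true for null, undefined, and undetectable objects
//                 (the map bit-field test in the else arm).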


Register FullCodeGenerator::result_register() {
  return rax;
}


Register FullCodeGenerator::context_register() {
  return rsi;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ movp(value, Operand(rbp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ movp(Operand(rbp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ movp(dst, ContextOperand(rsi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  DeclarationScope* closure_scope = scope()->GetClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ movp(rax, NativeContextOperand());
    PushOperand(ContextOperand(rax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    PushOperand(ContextOperand(rsi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    PushOperand(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.


void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(rdx));

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Load(rdx, pending_message_obj);
  PushOperand(rdx);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(rdx));
  // Restore pending message from stack.
  PopOperand(rdx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Store(pending_message_obj, rdx);
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(rdx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ Store(pending_message_obj, rdx);
}


void FullCodeGenerator::DeferredCommands::EmitCommands() {
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(rdx);                // Get the token.
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ SmiCompare(rdx, Smi::FromInt(cmd.token));
    __ j(not_equal, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}
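
// Sketch of what EmitCommands dispatches: for
//   try { if (c) break; return v; } finally { ... }
// the break or return first records a smi token and jumps into the finally
// code; afterwards the token popped into rdx selects which deferred command
// (kBreak, kReturn, ...) to replay.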

#undef __


static const byte kJnsInstruction = 0x79;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;
#endif


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     jns ok
      //     call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     nop
      //     call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}
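
// Byte-level view of the patch (illustrative; the little-endian call
// immediate is elided):
//   INTERRUPT:            79 <off> e8 <imm32>  ; jns ok; call InterruptCheck
//   ON_STACK_REPLACEMENT: 66 90    e8 <imm32>  ; 2-byte nop; call OSR stub
// Overwriting the jns (0x79) and its offset with 0x66 0x90 makes the call
// unconditional.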


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  DCHECK_EQ(
      isolate->builtins()->OnStackReplacement()->entry(),
      Assembler::target_address_at(call_target_address, unoptimized_code));
  return ON_STACK_REPLACEMENT;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64