Home | History | Annotate | Download | only in x64
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #if V8_TARGET_ARCH_X64
      6 
      7 #include "src/assembler-inl.h"
      8 #include "src/ast/compile-time-value.h"
      9 #include "src/ast/scopes.h"
     10 #include "src/builtins/builtins-constructor.h"
     11 #include "src/code-factory.h"
     12 #include "src/code-stubs.h"
     13 #include "src/codegen.h"
     14 #include "src/compilation-info.h"
     15 #include "src/compiler.h"
     16 #include "src/debug/debug.h"
     17 #include "src/full-codegen/full-codegen.h"
     18 #include "src/heap/heap-inl.h"
     19 #include "src/ic/ic.h"
     20 #include "src/objects-inl.h"
     21 
     22 namespace v8 {
     23 namespace internal {
     24 
     25 #define __ ACCESS_MASM(masm())
     26 
     27 class JumpPatchSite BASE_EMBEDDED {
     28  public:
     29   explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
     30 #ifdef DEBUG
     31     info_emitted_ = false;
     32 #endif
     33   }
     34 
     35   ~JumpPatchSite() {
     36     DCHECK(patch_site_.is_bound() == info_emitted_);
     37   }
     38 
     39   void EmitJumpIfNotSmi(Register reg,
     40                         Label* target,
     41                         Label::Distance near_jump = Label::kFar) {
     42     __ testb(reg, Immediate(kSmiTagMask));
     43     EmitJump(not_carry, target, near_jump);   // Always taken before patched.
     44   }
     45 
     46   void EmitJumpIfSmi(Register reg,
     47                      Label* target,
     48                      Label::Distance near_jump = Label::kFar) {
     49     __ testb(reg, Immediate(kSmiTagMask));
     50     EmitJump(carry, target, near_jump);  // Never taken before patched.
     51   }
     52 
     53   void EmitPatchInfo() {
     54     if (patch_site_.is_bound()) {
     55       int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
     56       DCHECK(is_uint8(delta_to_patch_site));
     57       __ testl(rax, Immediate(delta_to_patch_site));
     58 #ifdef DEBUG
     59       info_emitted_ = true;
     60 #endif
     61     } else {
     62       __ nop();  // Signals no inlined code.
     63     }
     64   }
     65 
     66  private:
     67   // jc will be patched with jz, jnc will become jnz.
     68   void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
     69     DCHECK(!patch_site_.is_bound() && !info_emitted_);
     70     DCHECK(cc == carry || cc == not_carry);
     71     __ bind(&patch_site_);
     72     __ j(cc, target, near_jump);
     73   }
     74 
     75   MacroAssembler* masm() { return masm_; }
     76   MacroAssembler* masm_;
     77   Label patch_site_;
     78 #ifdef DEBUG
     79   bool info_emitted_;
     80 #endif
     81 };
     82 
     83 
     84 // Generate code for a JS function.  On entry to the function the receiver
     85 // and arguments have been pushed on the stack left to right, with the
     86 // return address on top of them.  The actual argument count matches the
     87 // formal parameter count expected by the function.
     88 //
     89 // The live registers are:
     90 //   o rdi: the JS function object being called (i.e. ourselves)
     91 //   o rdx: the new target value
     92 //   o rsi: our context
     93 //   o rbp: our caller's frame pointer
     94 //   o rsp: stack pointer (pointing to return address)
     95 //
     96 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
     97 // frames-x64.h for its layout.
     98 void FullCodeGenerator::Generate() {
     99   CompilationInfo* info = info_;
    100   DCHECK_EQ(scope(), info->scope());
    101   profiling_counter_ = isolate()->factory()->NewCell(
    102       Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
    103   SetFunctionPosition(literal());
    104   Comment cmnt(masm_, "[ function compiled by full code generator");
    105 
    106   ProfileEntryHookStub::MaybeCallEntryHook(masm_);
    107 
    108   if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    109     StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
    110     __ movp(rcx, args.GetReceiverOperand());
    111     __ AssertNotSmi(rcx);
    112     __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rcx);
    113     __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
    114   }
    115 
    116   // Open a frame scope to indicate that there is a frame on the stack.  The
    117   // MANUAL indicates that the scope shouldn't actually generate code to set up
    118   // the frame (that is done below).
    119   FrameScope frame_scope(masm_, StackFrame::MANUAL);
    120 
    121   info->set_prologue_offset(masm_->pc_offset());
    122   __ Prologue(info->GeneratePreagedPrologue());
    123 
    124   // Increment invocation count for the function.
    125   {
    126     Comment cmnt(masm_, "[ Increment invocation count");
    127     __ movp(rcx, FieldOperand(rdi, JSFunction::kFeedbackVectorOffset));
    128     __ movp(rcx, FieldOperand(rcx, Cell::kValueOffset));
    129     __ SmiAddConstant(
    130         FieldOperand(rcx, FeedbackVector::kInvocationCountIndex * kPointerSize +
    131                               FeedbackVector::kHeaderSize),
    132         Smi::FromInt(1));
    133   }
    134 
    135   { Comment cmnt(masm_, "[ Allocate locals");
    136     int locals_count = info->scope()->num_stack_slots();
    137     OperandStackDepthIncrement(locals_count);
    138     if (locals_count == 1) {
    139       __ PushRoot(Heap::kUndefinedValueRootIndex);
    140     } else if (locals_count > 1) {
    141       if (locals_count >= 128) {
    142         Label ok;
    143         __ movp(rcx, rsp);
    144         __ subp(rcx, Immediate(locals_count * kPointerSize));
    145         __ CompareRoot(rcx, Heap::kRealStackLimitRootIndex);
    146         __ j(above_equal, &ok, Label::kNear);
    147         __ CallRuntime(Runtime::kThrowStackOverflow);
    148         __ bind(&ok);
    149       }
    150       __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    151       const int kMaxPushes = 32;
    152       if (locals_count >= kMaxPushes) {
    153         int loop_iterations = locals_count / kMaxPushes;
    154         __ movp(rcx, Immediate(loop_iterations));
    155         Label loop_header;
    156         __ bind(&loop_header);
    157         // Do pushes.
    158         for (int i = 0; i < kMaxPushes; i++) {
    159           __ Push(rax);
    160         }
    161         // Continue loop if not done.
    162         __ decp(rcx);
    163         __ j(not_zero, &loop_header, Label::kNear);
    164       }
    165       int remaining = locals_count % kMaxPushes;
    166       // Emit the remaining pushes.
    167       for (int i  = 0; i < remaining; i++) {
    168         __ Push(rax);
    169       }
    170     }
    171   }
    172 
    173   bool function_in_register = true;
    174 
    175   // Possibly allocate a local context.
    176   if (info->scope()->NeedsContext()) {
    177     Comment cmnt(masm_, "[ Allocate context");
    178     bool need_write_barrier = true;
    179     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    180     // Argument to NewContext is the function, which is still in rdi.
    181     if (info->scope()->is_script_scope()) {
    182       __ Push(rdi);
    183       __ Push(info->scope()->scope_info());
    184       __ CallRuntime(Runtime::kNewScriptContext);
    185       PrepareForBailoutForId(BailoutId::ScriptContext(),
    186                              BailoutState::TOS_REGISTER);
    187       // The new target value is not used, clobbering is safe.
    188       DCHECK_NULL(info->scope()->new_target_var());
    189     } else {
    190       if (info->scope()->new_target_var() != nullptr) {
    191         __ Push(rdx);  // Preserve new target.
    192       }
    193       if (slots <=
    194           ConstructorBuiltinsAssembler::MaximumFunctionContextSlots()) {
    195         Callable callable = CodeFactory::FastNewFunctionContext(
    196             isolate(), info->scope()->scope_type());
    197         __ Set(FastNewFunctionContextDescriptor::SlotsRegister(), slots);
    198         __ Call(callable.code(), RelocInfo::CODE_TARGET);
    199         // Result of the FastNewFunctionContext builtin is always in new space.
    200         need_write_barrier = false;
    201       } else {
    202         __ Push(rdi);
    203         __ Push(Smi::FromInt(info->scope()->scope_type()));
    204         __ CallRuntime(Runtime::kNewFunctionContext);
    205       }
    206       if (info->scope()->new_target_var() != nullptr) {
    207         __ Pop(rdx);  // Restore new target.
    208       }
    209     }
    210     function_in_register = false;
    211     // Context is returned in rax.  It replaces the context passed to us.
    212     // It's saved in the stack and kept live in rsi.
    213     __ movp(rsi, rax);
    214     __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);
    215 
    216     // Copy any necessary parameters into the context.
    217     int num_parameters = info->scope()->num_parameters();
    218     int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    219     for (int i = first_parameter; i < num_parameters; i++) {
    220       Variable* var =
    221           (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
    222       if (var->IsContextSlot()) {
    223         int parameter_offset = StandardFrameConstants::kCallerSPOffset +
    224             (num_parameters - 1 - i) * kPointerSize;
    225         // Load parameter from stack.
    226         __ movp(rax, Operand(rbp, parameter_offset));
    227         // Store it in the context.
    228         int context_offset = Context::SlotOffset(var->index());
    229         __ movp(Operand(rsi, context_offset), rax);
    230         // Update the write barrier.  This clobbers rax and rbx.
    231         if (need_write_barrier) {
    232           __ RecordWriteContextSlot(
    233               rsi, context_offset, rax, rbx, kDontSaveFPRegs);
    234         } else if (FLAG_debug_code) {
    235           Label done;
    236           __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
    237           __ Abort(kExpectedNewSpaceObject);
    238           __ bind(&done);
    239         }
    240       }
    241     }
    242   }
    243 
    244   // Register holding this function and new target are both trashed in case we
    245   // bailout here. But since that can happen only when new target is not used
    246   // and we allocate a context, the value of |function_in_register| is correct.
    247   PrepareForBailoutForId(BailoutId::FunctionContext(),
    248                          BailoutState::NO_REGISTERS);
    249 
    250   // We don't support new.target and rest parameters here.
    251   DCHECK_NULL(info->scope()->new_target_var());
    252   DCHECK_NULL(info->scope()->rest_parameter());
    253   DCHECK_NULL(info->scope()->this_function_var());
    254 
    255   // Possibly allocate an arguments object.
    256   DCHECK_EQ(scope(), info->scope());
    257   Variable* arguments = info->scope()->arguments();
    258   if (arguments != NULL) {
    259     // Arguments object must be allocated after the context object, in
    260     // case the "arguments" or ".arguments" variables are in the context.
    261     Comment cmnt(masm_, "[ Allocate arguments object");
    262     if (!function_in_register) {
    263       __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    264     }
    265     if (is_strict(language_mode()) || !has_simple_parameters()) {
    266       __ call(isolate()->builtins()->FastNewStrictArguments(),
    267               RelocInfo::CODE_TARGET);
    268       RestoreContext();
    269     } else if (literal()->has_duplicate_parameters()) {
    270       __ Push(rdi);
    271       __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    272     } else {
    273       __ call(isolate()->builtins()->FastNewSloppyArguments(),
    274               RelocInfo::CODE_TARGET);
    275       RestoreContext();
    276     }
    277 
    278     SetVar(arguments, rax, rbx, rdx);
    279   }
    280 
    281   if (FLAG_trace) {
    282     __ CallRuntime(Runtime::kTraceEnter);
    283   }
    284 
    285   // Visit the declarations and body unless there is an illegal
    286   // redeclaration.
    287   PrepareForBailoutForId(BailoutId::FunctionEntry(),
    288                          BailoutState::NO_REGISTERS);
    289   {
    290     Comment cmnt(masm_, "[ Declarations");
    291     VisitDeclarations(info->scope()->declarations());
    292   }
    293 
    294   // Assert that the declarations do not use ICs. Otherwise the debugger
    295   // won't be able to redirect a PC at an IC to the correct IC in newly
    296   // recompiled code.
    297   DCHECK_EQ(0, ic_total_count_);
    298 
    299   {
    300     Comment cmnt(masm_, "[ Stack check");
    301     PrepareForBailoutForId(BailoutId::Declarations(),
    302                            BailoutState::NO_REGISTERS);
    303     Label ok;
    304     __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    305     __ j(above_equal, &ok, Label::kNear);
    306     __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    307     __ bind(&ok);
    308   }
    309 
    310   {
    311     Comment cmnt(masm_, "[ Body");
    312     DCHECK(loop_depth() == 0);
    313     VisitStatements(literal()->body());
    314     DCHECK(loop_depth() == 0);
    315   }
    316 
    317   // Always emit a 'return undefined' in case control fell off the end of
    318   // the body.
    319   { Comment cmnt(masm_, "[ return <undefined>;");
    320     __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    321     EmitReturnSequence();
    322   }
    323 }
    324 
    325 
    326 void FullCodeGenerator::ClearAccumulator() {
    327   __ Set(rax, 0);
    328 }
    329 
    330 
    331 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
    332   __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
    333   __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
    334                     Smi::FromInt(-delta));
    335 }
    336 
    337 
    338 void FullCodeGenerator::EmitProfilingCounterReset() {
    339   int reset_value = FLAG_interrupt_budget;
    340   __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
    341   __ Move(kScratchRegister, Smi::FromInt(reset_value));
    342   __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
    343 }
    344 
    345 
    346 static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;
    347 
    348 
    349 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
    350                                                 Label* back_edge_target) {
    351   Comment cmnt(masm_, "[ Back edge bookkeeping");
    352   Label ok;
    353 
    354   DCHECK(back_edge_target->is_bound());
    355   int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
    356   int weight = Min(kMaxBackEdgeWeight,
    357                    Max(1, distance / kCodeSizeMultiplier));
    358   EmitProfilingCounterDecrement(weight);
    359 
    360   __ j(positive, &ok, Label::kNear);
    361   {
    362     PredictableCodeSizeScope predictible_code_size_scope(masm_, kJnsOffset);
    363     DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
    364     __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
    365 
    366     // Record a mapping of this PC offset to the OSR id.  This is used to find
    367     // the AST id from the unoptimized code in order to use it as a key into
    368     // the deoptimization input data found in the optimized code.
    369     RecordBackEdge(stmt->OsrEntryId());
    370 
    371     EmitProfilingCounterReset();
    372   }
    373   __ bind(&ok);
    374 
    375   PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
    376   // Record a mapping of the OSR id to this PC.  This is used if the OSR
    377   // entry becomes the target of a bailout.  We don't expect it to be, but
    378   // we want it to work if it is.
    379   PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
    380 }
    381 
    382 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    383     bool is_tail_call) {
    384   // Pretend that the exit is a backwards jump to the entry.
    385   int weight = 1;
    386   if (info_->ShouldSelfOptimize()) {
    387     weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    388   } else {
    389     int distance = masm_->pc_offset();
    390     weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
    391   }
    392   EmitProfilingCounterDecrement(weight);
    393   Label ok;
    394   __ j(positive, &ok, Label::kNear);
    395   // Don't need to save result register if we are going to do a tail call.
    396   if (!is_tail_call) {
    397     __ Push(rax);
    398   }
    399   __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
    400   if (!is_tail_call) {
    401     __ Pop(rax);
    402   }
    403   EmitProfilingCounterReset();
    404   __ bind(&ok);
    405 }
    406 
    407 void FullCodeGenerator::EmitReturnSequence() {
    408   Comment cmnt(masm_, "[ Return sequence");
    409   if (return_label_.is_bound()) {
    410     __ jmp(&return_label_);
    411   } else {
    412     __ bind(&return_label_);
    413     if (FLAG_trace) {
    414       __ Push(rax);
    415       __ CallRuntime(Runtime::kTraceExit);
    416     }
    417     EmitProfilingCounterHandlingForReturnSequence(false);
    418 
    419     SetReturnPosition(literal());
    420     __ leave();
    421 
    422     int arg_count = info_->scope()->num_parameters() + 1;
    423     int arguments_bytes = arg_count * kPointerSize;
    424     __ Ret(arguments_bytes, rcx);
    425   }
    426 }
    427 
    428 void FullCodeGenerator::RestoreContext() {
    429   __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
    430 }
    431 
    432 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
    433   DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    434   MemOperand operand = codegen()->VarOperand(var, result_register());
    435   codegen()->PushOperand(operand);
    436 }
    437 
    438 
    439 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
    440 }
    441 
    442 
    443 void FullCodeGenerator::AccumulatorValueContext::Plug(
    444     Heap::RootListIndex index) const {
    445   __ LoadRoot(result_register(), index);
    446 }
    447 
    448 
    449 void FullCodeGenerator::StackValueContext::Plug(
    450     Heap::RootListIndex index) const {
    451   codegen()->OperandStackDepthIncrement(1);
    452   __ PushRoot(index);
    453 }
    454 
    455 
    456 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
    457   codegen()->PrepareForBailoutBeforeSplit(condition(),
    458                                           true,
    459                                           true_label_,
    460                                           false_label_);
    461   if (index == Heap::kUndefinedValueRootIndex ||
    462       index == Heap::kNullValueRootIndex ||
    463       index == Heap::kFalseValueRootIndex) {
    464     if (false_label_ != fall_through_) __ jmp(false_label_);
    465   } else if (index == Heap::kTrueValueRootIndex) {
    466     if (true_label_ != fall_through_) __ jmp(true_label_);
    467   } else {
    468     __ LoadRoot(result_register(), index);
    469     codegen()->DoTest(this);
    470   }
    471 }
    472 
    473 
    474 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
    475 }
    476 
    477 
    478 void FullCodeGenerator::AccumulatorValueContext::Plug(
    479     Handle<Object> lit) const {
    480   if (lit->IsSmi()) {
    481     __ SafeMove(result_register(), Smi::cast(*lit));
    482   } else {
    483     __ Move(result_register(), lit);
    484   }
    485 }
    486 
    487 
    488 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
    489   codegen()->OperandStackDepthIncrement(1);
    490   if (lit->IsSmi()) {
    491     __ SafePush(Smi::cast(*lit));
    492   } else {
    493     __ Push(lit);
    494   }
    495 }
    496 
    497 
    498 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
    499   codegen()->PrepareForBailoutBeforeSplit(condition(),
    500                                           true,
    501                                           true_label_,
    502                                           false_label_);
    503   DCHECK(lit->IsNullOrUndefined(isolate()) || !lit->IsUndetectable());
    504   if (lit->IsNullOrUndefined(isolate()) || lit->IsFalse(isolate())) {
    505     if (false_label_ != fall_through_) __ jmp(false_label_);
    506   } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    507     if (true_label_ != fall_through_) __ jmp(true_label_);
    508   } else if (lit->IsString()) {
    509     if (String::cast(*lit)->length() == 0) {
    510       if (false_label_ != fall_through_) __ jmp(false_label_);
    511     } else {
    512       if (true_label_ != fall_through_) __ jmp(true_label_);
    513     }
    514   } else if (lit->IsSmi()) {
    515     if (Smi::cast(*lit)->value() == 0) {
    516       if (false_label_ != fall_through_) __ jmp(false_label_);
    517     } else {
    518       if (true_label_ != fall_through_) __ jmp(true_label_);
    519     }
    520   } else {
    521     // For simplicity we always test the accumulator register.
    522     __ Move(result_register(), lit);
    523     codegen()->DoTest(this);
    524   }
    525 }
    526 
    527 
    528 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
    529                                                        Register reg) const {
    530   DCHECK(count > 0);
    531   if (count > 1) codegen()->DropOperands(count - 1);
    532   __ movp(Operand(rsp, 0), reg);
    533 }
    534 
    535 
    536 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
    537                                             Label* materialize_false) const {
    538   DCHECK(materialize_true == materialize_false);
    539   __ bind(materialize_true);
    540 }
    541 
    542 
    543 void FullCodeGenerator::AccumulatorValueContext::Plug(
    544     Label* materialize_true,
    545     Label* materialize_false) const {
    546   Label done;
    547   __ bind(materialize_true);
    548   __ Move(result_register(), isolate()->factory()->true_value());
    549   __ jmp(&done, Label::kNear);
    550   __ bind(materialize_false);
    551   __ Move(result_register(), isolate()->factory()->false_value());
    552   __ bind(&done);
    553 }
    554 
    555 
    556 void FullCodeGenerator::StackValueContext::Plug(
    557     Label* materialize_true,
    558     Label* materialize_false) const {
    559   codegen()->OperandStackDepthIncrement(1);
    560   Label done;
    561   __ bind(materialize_true);
    562   __ Push(isolate()->factory()->true_value());
    563   __ jmp(&done, Label::kNear);
    564   __ bind(materialize_false);
    565   __ Push(isolate()->factory()->false_value());
    566   __ bind(&done);
    567 }
    568 
    569 
    570 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
    571                                           Label* materialize_false) const {
    572   DCHECK(materialize_true == true_label_);
    573   DCHECK(materialize_false == false_label_);
    574 }
    575 
    576 
    577 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
    578   Heap::RootListIndex value_root_index =
    579       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
    580   __ LoadRoot(result_register(), value_root_index);
    581 }
    582 
    583 
    584 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
    585   codegen()->OperandStackDepthIncrement(1);
    586   Heap::RootListIndex value_root_index =
    587       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
    588   __ PushRoot(value_root_index);
    589 }
    590 
    591 
    592 void FullCodeGenerator::TestContext::Plug(bool flag) const {
    593   codegen()->PrepareForBailoutBeforeSplit(condition(),
    594                                           true,
    595                                           true_label_,
    596                                           false_label_);
    597   if (flag) {
    598     if (true_label_ != fall_through_) __ jmp(true_label_);
    599   } else {
    600     if (false_label_ != fall_through_) __ jmp(false_label_);
    601   }
    602 }
    603 
    604 
    605 void FullCodeGenerator::DoTest(Expression* condition,
    606                                Label* if_true,
    607                                Label* if_false,
    608                                Label* fall_through) {
    609   Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
    610   CallIC(ic, condition->test_id());
    611   __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
    612   Split(equal, if_true, if_false, fall_through);
    613 }
    614 
    615 
    616 void FullCodeGenerator::Split(Condition cc,
    617                               Label* if_true,
    618                               Label* if_false,
    619                               Label* fall_through) {
    620   if (if_false == fall_through) {
    621     __ j(cc, if_true);
    622   } else if (if_true == fall_through) {
    623     __ j(NegateCondition(cc), if_false);
    624   } else {
    625     __ j(cc, if_true);
    626     __ jmp(if_false);
    627   }
    628 }
    629 
    630 
    631 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
    632   DCHECK(var->IsStackAllocated());
    633   // Offset is negative because higher indexes are at lower addresses.
    634   int offset = -var->index() * kPointerSize;
    635   // Adjust by a (parameter or local) base offset.
    636   if (var->IsParameter()) {
    637     offset += kFPOnStackSize + kPCOnStackSize +
    638               (info_->scope()->num_parameters() - 1) * kPointerSize;
    639   } else {
    640     offset += JavaScriptFrameConstants::kLocal0Offset;
    641   }
    642   return Operand(rbp, offset);
    643 }
    644 
    645 
    646 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
    647   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
    648   if (var->IsContextSlot()) {
    649     int context_chain_length = scope()->ContextChainLength(var->scope());
    650     __ LoadContext(scratch, context_chain_length);
    651     return ContextOperand(scratch, var->index());
    652   } else {
    653     return StackOperand(var);
    654   }
    655 }
    656 
    657 
    658 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
    659   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
    660   MemOperand location = VarOperand(var, dest);
    661   __ movp(dest, location);
    662 }
    663 
    664 
    665 void FullCodeGenerator::SetVar(Variable* var,
    666                                Register src,
    667                                Register scratch0,
    668                                Register scratch1) {
    669   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
    670   DCHECK(!scratch0.is(src));
    671   DCHECK(!scratch0.is(scratch1));
    672   DCHECK(!scratch1.is(src));
    673   MemOperand location = VarOperand(var, scratch0);
    674   __ movp(location, src);
    675 
    676   // Emit the write barrier code if the location is in the heap.
    677   if (var->IsContextSlot()) {
    678     int offset = Context::SlotOffset(var->index());
    679     __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
    680   }
    681 }
    682 
    683 
    684 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
    685                                                      bool should_normalize,
    686                                                      Label* if_true,
    687                                                      Label* if_false) {
    688   // Only prepare for bailouts before splits if we're in a test
    689   // context. Otherwise, we let the Visit function deal with the
    690   // preparation to avoid preparing with the same AST id twice.
    691   if (!context()->IsTest()) return;
    692 
    693   Label skip;
    694   if (should_normalize) __ jmp(&skip, Label::kNear);
    695   PrepareForBailout(expr, BailoutState::TOS_REGISTER);
    696   if (should_normalize) {
    697     __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    698     Split(equal, if_true, if_false, NULL);
    699     __ bind(&skip);
    700   }
    701 }
    702 
    703 
    704 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
    705   // The variable in the declaration always resides in the current context.
    706   DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
    707   if (FLAG_debug_code) {
    708     // Check that we're not inside a with or catch context.
    709     __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
    710     __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
    711     __ Check(not_equal, kDeclarationInWithContext);
    712     __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
    713     __ Check(not_equal, kDeclarationInCatchContext);
    714   }
    715 }
    716 
    717 
    718 void FullCodeGenerator::VisitVariableDeclaration(
    719     VariableDeclaration* declaration) {
    720   VariableProxy* proxy = declaration->proxy();
    721   Variable* variable = proxy->var();
    722   switch (variable->location()) {
    723     case VariableLocation::UNALLOCATED: {
    724       DCHECK(!variable->binding_needs_init());
    725       globals_->Add(variable->name(), zone());
    726       FeedbackSlot slot = proxy->VariableFeedbackSlot();
    727       DCHECK(!slot.IsInvalid());
    728       globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
    729       globals_->Add(isolate()->factory()->undefined_value(), zone());
    730       globals_->Add(isolate()->factory()->undefined_value(), zone());
    731       break;
    732     }
    733     case VariableLocation::PARAMETER:
    734     case VariableLocation::LOCAL:
    735       if (variable->binding_needs_init()) {
    736         Comment cmnt(masm_, "[ VariableDeclaration");
    737         __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
    738         __ movp(StackOperand(variable), kScratchRegister);
    739       }
    740       break;
    741 
    742     case VariableLocation::CONTEXT:
    743       if (variable->binding_needs_init()) {
    744         Comment cmnt(masm_, "[ VariableDeclaration");
    745         EmitDebugCheckDeclarationContext(variable);
    746         __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
    747         __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
    748         // No write barrier since the hole value is in old space.
    749         PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
    750       }
    751       break;
    752 
    753     case VariableLocation::LOOKUP:
    754     case VariableLocation::MODULE:
    755       UNREACHABLE();
    756   }
    757 }
    758 
    759 
    760 void FullCodeGenerator::VisitFunctionDeclaration(
    761     FunctionDeclaration* declaration) {
    762   VariableProxy* proxy = declaration->proxy();
    763   Variable* variable = proxy->var();
    764   switch (variable->location()) {
    765     case VariableLocation::UNALLOCATED: {
    766       globals_->Add(variable->name(), zone());
    767       FeedbackSlot slot = proxy->VariableFeedbackSlot();
    768       DCHECK(!slot.IsInvalid());
    769       globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
    770 
    771       // We need the slot where the literals array lives, too.
    772       slot = declaration->fun()->LiteralFeedbackSlot();
    773       DCHECK(!slot.IsInvalid());
    774       globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
    775 
    776       Handle<SharedFunctionInfo> function =
    777           Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
    778       // Check for stack-overflow exception.
    779       if (function.is_null()) return SetStackOverflow();
    780       globals_->Add(function, zone());
    781       break;
    782     }
    783 
    784     case VariableLocation::PARAMETER:
    785     case VariableLocation::LOCAL: {
    786       Comment cmnt(masm_, "[ FunctionDeclaration");
    787       VisitForAccumulatorValue(declaration->fun());
    788       __ movp(StackOperand(variable), result_register());
    789       break;
    790     }
    791 
    792     case VariableLocation::CONTEXT: {
    793       Comment cmnt(masm_, "[ FunctionDeclaration");
    794       EmitDebugCheckDeclarationContext(variable);
    795       VisitForAccumulatorValue(declaration->fun());
    796       __ movp(ContextOperand(rsi, variable->index()), result_register());
    797       int offset = Context::SlotOffset(variable->index());
    798       // We know that we have written a function, which is not a smi.
    799       __ RecordWriteContextSlot(rsi,
    800                                 offset,
    801                                 result_register(),
    802                                 rcx,
    803                                 kDontSaveFPRegs,
    804                                 EMIT_REMEMBERED_SET,
    805                                 OMIT_SMI_CHECK);
    806       PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
    807       break;
    808     }
    809 
    810     case VariableLocation::LOOKUP:
    811     case VariableLocation::MODULE:
    812       UNREACHABLE();
    813   }
    814 }
    815 
    816 
    817 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
    818   // Call the runtime to declare the globals.
    819   __ Push(pairs);
    820   __ Push(Smi::FromInt(DeclareGlobalsFlags()));
    821   __ EmitLoadFeedbackVector(rax);
    822   __ Push(rax);
    823   __ CallRuntime(Runtime::kDeclareGlobals);
    824   // Return value is ignored.
    825 }
    826 
    827 
    828 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
    829   Comment cmnt(masm_, "[ SwitchStatement");
    830   Breakable nested_statement(this, stmt);
    831   SetStatementPosition(stmt);
    832 
    833   // Keep the switch value on the stack until a case matches.
    834   VisitForStackValue(stmt->tag());
    835   PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
    836 
    837   ZoneList<CaseClause*>* clauses = stmt->cases();
    838   CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
    839 
    840   Label next_test;  // Recycled for each test.
    841   // Compile all the tests with branches to their bodies.
    842   for (int i = 0; i < clauses->length(); i++) {
    843     CaseClause* clause = clauses->at(i);
    844     clause->body_target()->Unuse();
    845 
    846     // The default is not a test, but remember it as final fall through.
    847     if (clause->is_default()) {
    848       default_clause = clause;
    849       continue;
    850     }
    851 
    852     Comment cmnt(masm_, "[ Case comparison");
    853     __ bind(&next_test);
    854     next_test.Unuse();
    855 
    856     // Compile the label expression.
    857     VisitForAccumulatorValue(clause->label());
    858 
    859     // Perform the comparison as if via '==='.
    860     __ movp(rdx, Operand(rsp, 0));  // Switch value.
    861     bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    862     JumpPatchSite patch_site(masm_);
    863     if (inline_smi_code) {
    864       Label slow_case;
    865       __ movp(rcx, rdx);
    866       __ orp(rcx, rax);
    867       patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
    868 
    869       __ cmpp(rdx, rax);
    870       __ j(not_equal, &next_test);
    871       __ Drop(1);  // Switch value is no longer needed.
    872       __ jmp(clause->body_target());
    873       __ bind(&slow_case);
    874     }
    875 
    876     // Record position before stub call for type feedback.
    877     SetExpressionPosition(clause);
    878     Handle<Code> ic =
    879         CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    880     CallIC(ic, clause->CompareId());
    881     patch_site.EmitPatchInfo();
    882 
    883     Label skip;
    884     __ jmp(&skip, Label::kNear);
    885     PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    886     __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    887     __ j(not_equal, &next_test);
    888     __ Drop(1);
    889     __ jmp(clause->body_target());
    890     __ bind(&skip);
    891 
    892     __ testp(rax, rax);
    893     __ j(not_equal, &next_test);
    894     __ Drop(1);  // Switch value is no longer needed.
    895     __ jmp(clause->body_target());
    896   }
    897 
    898   // Discard the test value and jump to the default if present, otherwise to
    899   // the end of the statement.
    900   __ bind(&next_test);
    901   DropOperands(1);  // Switch value is no longer needed.
    902   if (default_clause == NULL) {
    903     __ jmp(nested_statement.break_label());
    904   } else {
    905     __ jmp(default_clause->body_target());
    906   }
    907 
    908   // Compile all the case bodies.
    909   for (int i = 0; i < clauses->length(); i++) {
    910     Comment cmnt(masm_, "[ Case body");
    911     CaseClause* clause = clauses->at(i);
    912     __ bind(clause->body_target());
    913     PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    914     VisitStatements(clause->statements());
    915   }
    916 
    917   __ bind(nested_statement.break_label());
    918   PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
    919 }
    920 
    921 
    922 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
    923   Comment cmnt(masm_, "[ ForInStatement");
    924   SetStatementPosition(stmt, SKIP_BREAK);
    925 
    926   FeedbackSlot slot = stmt->ForInFeedbackSlot();
    927 
    928   // Get the object to enumerate over.
    929   SetExpressionAsStatementPosition(stmt->enumerable());
    930   VisitForAccumulatorValue(stmt->enumerable());
    931   OperandStackDepthIncrement(5);
    932 
    933   Label loop, exit;
    934   Iteration loop_statement(this, stmt);
    935   increment_loop_depth();
    936 
    937   // If the object is null or undefined, skip over the loop, otherwise convert
    938   // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
    939   Label convert, done_convert;
    940   __ JumpIfSmi(rax, &convert, Label::kNear);
    941   __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
    942   __ j(above_equal, &done_convert, Label::kNear);
    943   __ CompareRoot(rax, Heap::kNullValueRootIndex);
    944   __ j(equal, &exit);
    945   __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
    946   __ j(equal, &exit);
    947   __ bind(&convert);
    948   __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
    949   RestoreContext();
    950   __ bind(&done_convert);
    951   PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
    952   __ Push(rax);
    953 
    954   // Check cache validity in generated code. If we cannot guarantee cache
    955   // validity, call the runtime system to check cache validity or get the
    956   // property names in a fixed array. Note: Proxies never have an enum cache,
    957   // so will always take the slow path.
    958   Label call_runtime;
    959   __ CheckEnumCache(&call_runtime);
    960 
    961   // The enum cache is valid.  Load the map of the object being
    962   // iterated over and use the cache for the iteration.
    963   Label use_cache;
    964   __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
    965   __ jmp(&use_cache, Label::kNear);
    966 
    967   // Get the set of properties to enumerate.
    968   __ bind(&call_runtime);
    969   __ Push(rax);  // Duplicate the enumerable object on the stack.
    970   __ CallRuntime(Runtime::kForInEnumerate);
    971   PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
    972 
    973   // If we got a map from the runtime call, we can do a fast
    974   // modification check. Otherwise, we got a fixed array, and we have
    975   // to do a slow check.
    976   Label fixed_array;
    977   __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
    978                  Heap::kMetaMapRootIndex);
    979   __ j(not_equal, &fixed_array);
    980 
    981   // We got a map in register rax. Get the enumeration cache from it.
    982   __ bind(&use_cache);
    983 
    984   Label no_descriptors;
    985 
    986   __ EnumLength(rdx, rax);
    987   __ Cmp(rdx, Smi::kZero);
    988   __ j(equal, &no_descriptors);
    989 
    990   __ LoadInstanceDescriptors(rax, rcx);
    991   __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
    992   __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
    993 
    994   // Set up the four remaining stack slots.
    995   __ Push(rax);  // Map.
    996   __ Push(rcx);  // Enumeration cache.
    997   __ Push(rdx);  // Number of valid entries for the map in the enum cache.
    998   __ Push(Smi::kZero);  // Initial index.
    999   __ jmp(&loop);
   1000 
   1001   __ bind(&no_descriptors);
   1002   __ addp(rsp, Immediate(kPointerSize));
   1003   __ jmp(&exit);
   1004 
   1005   // We got a fixed array in register rax. Iterate through that.
   1006   __ bind(&fixed_array);
   1007 
   1008   __ movp(rcx, Operand(rsp, 0 * kPointerSize));  // Get enumerated object
   1009   __ Push(Smi::FromInt(1));                      // Smi(1) indicates slow check
   1010   __ Push(rax);  // Array
   1011   __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
   1012   __ Push(rax);  // Fixed array length (as smi).
   1013   PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
   1014   __ Push(Smi::kZero);  // Initial index.
   1015 
   1016   // Generate code for doing the condition check.
   1017   __ bind(&loop);
   1018   SetExpressionAsStatementPosition(stmt->each());
   1019 
   1020   __ movp(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
   1021   __ cmpp(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
   1022   __ j(above_equal, loop_statement.break_label());
   1023 
   1024   // Get the current entry of the array into register rax.
   1025   __ movp(rbx, Operand(rsp, 2 * kPointerSize));
   1026   SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
   1027   __ movp(rax,
   1028           FieldOperand(rbx, index.reg, index.scale, FixedArray::kHeaderSize));
   1029 
   1030   // Get the expected map from the stack or a smi in the
   1031   // permanent slow case into register rdx.
   1032   __ movp(rdx, Operand(rsp, 3 * kPointerSize));
   1033 
   1034   // Check if the expected map still matches that of the enumerable.
   1035   // If not, we may have to filter the key.
   1036   Label update_each;
   1037   __ movp(rbx, Operand(rsp, 4 * kPointerSize));
   1038   __ cmpp(rdx, FieldOperand(rbx, HeapObject::kMapOffset));
   1039   __ j(equal, &update_each, Label::kNear);
   1040 
   1041   // We need to filter the key, record slow-path here.
   1042   int const vector_index = SmiFromSlot(slot)->value();
   1043   __ EmitLoadFeedbackVector(rdx);
   1044   __ Move(FieldOperand(rdx, FixedArray::OffsetOfElementAt(vector_index)),
   1045           FeedbackVector::MegamorphicSentinel(isolate()));
   1046 
   1047   // rax contains the key. The receiver in rbx is the second argument to
   1048   // ForInFilter. ForInFilter returns undefined if the receiver doesn't
   1049   // have the key or returns the name-converted key.
   1050   __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
   1051   RestoreContext();
   1052   PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
   1053   __ JumpIfRoot(result_register(), Heap::kUndefinedValueRootIndex,
   1054                 loop_statement.continue_label());
   1055 
   1056   // Update the 'each' property or variable from the possibly filtered
   1057   // entry in register rax.
   1058   __ bind(&update_each);
   1059   // Perform the assignment as if via '='.
   1060   { EffectContext context(this);
   1061     EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
   1062     PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
   1063   }
   1064 
   1065   // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
   1066   PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
   1067   // Generate code for the body of the loop.
   1068   Visit(stmt->body());
   1069 
   1070   // Generate code for going to the next element by incrementing the
   1071   // index (smi) stored on top of the stack.
   1072   __ bind(loop_statement.continue_label());
   1073   PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
   1074   __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
   1075 
   1076   EmitBackEdgeBookkeeping(stmt, &loop);
   1077   __ jmp(&loop);
   1078 
   1079   // Remove the pointers stored on the stack.
   1080   __ bind(loop_statement.break_label());
   1081   DropOperands(5);
   1082 
   1083   // Exit and decrement the loop depth.
   1084   PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
   1085   __ bind(&exit);
   1086   decrement_loop_depth();
   1087 }
   1088 
   1089 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
   1090                                           FeedbackSlot slot) {
   1091   DCHECK(NeedsHomeObject(initializer));
   1092   __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
   1093   __ movp(StoreDescriptor::ValueRegister(),
   1094           Operand(rsp, offset * kPointerSize));
   1095   CallStoreIC(slot, isolate()->factory()->home_object_symbol());
   1096 }
   1097 
   1098 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
   1099                                                      int offset,
   1100                                                      FeedbackSlot slot) {
   1101   DCHECK(NeedsHomeObject(initializer));
   1102   __ movp(StoreDescriptor::ReceiverRegister(), rax);
   1103   __ movp(StoreDescriptor::ValueRegister(),
   1104           Operand(rsp, offset * kPointerSize));
   1105   CallStoreIC(slot, isolate()->factory()->home_object_symbol());
   1106 }
   1107 
   1108 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
   1109                                          TypeofMode typeof_mode) {
   1110   // Record position before possible IC call.
   1111   SetExpressionPosition(proxy);
   1112   PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
   1113   Variable* var = proxy->var();
   1114 
   1115   // Two cases: global variable, and all other types of variables.
   1116   switch (var->location()) {
   1117     case VariableLocation::UNALLOCATED: {
   1118       Comment cmnt(masm_, "[ Global variable");
   1119       EmitGlobalVariableLoad(proxy, typeof_mode);
   1120       context()->Plug(rax);
   1121       break;
   1122     }
   1123 
   1124     case VariableLocation::PARAMETER:
   1125     case VariableLocation::LOCAL:
   1126     case VariableLocation::CONTEXT: {
   1127       DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
   1128       Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
   1129                                                : "[ Stack slot");
   1130       if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
   1131         // Throw a reference error when using an uninitialized let/const
   1132         // binding in harmony mode.
   1133         DCHECK(IsLexicalVariableMode(var->mode()));
   1134         Label done;
   1135         GetVar(rax, var);
   1136         __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
   1137         __ j(not_equal, &done, Label::kNear);
   1138         __ Push(var->name());
   1139         __ CallRuntime(Runtime::kThrowReferenceError);
   1140         __ bind(&done);
   1141         context()->Plug(rax);
   1142         break;
   1143       }
   1144       context()->Plug(var);
   1145       break;
   1146     }
   1147 
   1148     case VariableLocation::LOOKUP:
   1149     case VariableLocation::MODULE:
   1150       UNREACHABLE();
   1151   }
   1152 }
   1153 
   1154 
   1155 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
   1156   Expression* expression = (property == NULL) ? NULL : property->value();
   1157   if (expression == NULL) {
   1158     OperandStackDepthIncrement(1);
   1159     __ PushRoot(Heap::kNullValueRootIndex);
   1160   } else {
   1161     VisitForStackValue(expression);
   1162     if (NeedsHomeObject(expression)) {
   1163       DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
   1164              property->kind() == ObjectLiteral::Property::SETTER);
   1165       int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
   1166       EmitSetHomeObject(expression, offset, property->GetSlot());
   1167     }
   1168   }
   1169 }
   1170 
   1171 
   1172 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   1173   Comment cmnt(masm_, "[ ObjectLiteral");
   1174 
   1175   Handle<BoilerplateDescription> constant_properties =
   1176       expr->GetOrBuildConstantProperties(isolate());
   1177   int flags = expr->ComputeFlags();
   1178   if (MustCreateObjectLiteralWithRuntime(expr)) {
   1179     __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1180     __ Push(SmiFromSlot(expr->literal_slot()));
   1181     __ Push(constant_properties);
   1182     __ Push(Smi::FromInt(flags));
   1183     __ CallRuntime(Runtime::kCreateObjectLiteral);
   1184   } else {
   1185     __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1186     __ Move(rbx, SmiFromSlot(expr->literal_slot()));
   1187     __ Move(rcx, constant_properties);
   1188     __ Move(rdx, Smi::FromInt(flags));
   1189     Callable callable = CodeFactory::FastCloneShallowObject(
   1190         isolate(), expr->properties_count());
   1191     __ Call(callable.code(), RelocInfo::CODE_TARGET);
   1192     RestoreContext();
   1193   }
   1194   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
   1195 
   1196   // If result_saved is true the result is on top of the stack.  If
   1197   // result_saved is false the result is in rax.
   1198   bool result_saved = false;
   1199 
   1200   AccessorTable accessor_table(zone());
   1201   for (int i = 0; i < expr->properties()->length(); i++) {
   1202     ObjectLiteral::Property* property = expr->properties()->at(i);
   1203     DCHECK(!property->is_computed_name());
   1204     if (property->IsCompileTimeValue()) continue;
   1205 
   1206     Literal* key = property->key()->AsLiteral();
   1207     Expression* value = property->value();
   1208     if (!result_saved) {
   1209       PushOperand(rax);  // Save result on the stack
   1210       result_saved = true;
   1211     }
   1212     switch (property->kind()) {
   1213       case ObjectLiteral::Property::SPREAD:
   1214       case ObjectLiteral::Property::CONSTANT:
   1215         UNREACHABLE();
   1216       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
   1217         DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
   1218         // Fall through.
   1219       case ObjectLiteral::Property::COMPUTED:
   1220         // It is safe to use [[Put]] here because the boilerplate already
   1221         // contains computed properties with an uninitialized value.
   1222         if (key->IsStringLiteral()) {
   1223           DCHECK(key->IsPropertyName());
   1224           if (property->emit_store()) {
   1225             VisitForAccumulatorValue(value);
   1226             DCHECK(StoreDescriptor::ValueRegister().is(rax));
   1227             __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
   1228             CallStoreIC(property->GetSlot(0), key->value(), true);
   1229             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
   1230 
   1231             if (NeedsHomeObject(value)) {
   1232               EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
   1233             }
   1234           } else {
   1235             VisitForEffect(value);
   1236           }
   1237           break;
   1238         }
   1239         PushOperand(Operand(rsp, 0));  // Duplicate receiver.
   1240         VisitForStackValue(key);
   1241         VisitForStackValue(value);
   1242         if (property->emit_store()) {
   1243           if (NeedsHomeObject(value)) {
   1244             EmitSetHomeObject(value, 2, property->GetSlot());
   1245           }
   1246           PushOperand(Smi::FromInt(SLOPPY));  // Language mode
   1247           CallRuntimeWithOperands(Runtime::kSetProperty);
   1248         } else {
   1249           DropOperands(3);
   1250         }
   1251         break;
   1252       case ObjectLiteral::Property::PROTOTYPE:
   1253         PushOperand(Operand(rsp, 0));  // Duplicate receiver.
   1254         VisitForStackValue(value);
   1255         DCHECK(property->emit_store());
   1256         CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
   1257         PrepareForBailoutForId(expr->GetIdForPropertySet(i),
   1258                                BailoutState::NO_REGISTERS);
   1259         break;
   1260       case ObjectLiteral::Property::GETTER:
   1261         if (property->emit_store()) {
   1262           AccessorTable::Iterator it = accessor_table.lookup(key);
   1263           it->second->bailout_id = expr->GetIdForPropertySet(i);
   1264           it->second->getter = property;
   1265         }
   1266         break;
   1267       case ObjectLiteral::Property::SETTER:
   1268         if (property->emit_store()) {
   1269           AccessorTable::Iterator it = accessor_table.lookup(key);
   1270           it->second->bailout_id = expr->GetIdForPropertySet(i);
   1271           it->second->setter = property;
   1272         }
   1273         break;
   1274     }
   1275   }
   1276 
   1277   // Emit code to define accessors, using only a single call to the runtime for
   1278   // each pair of corresponding getters and setters.
   1279   for (AccessorTable::Iterator it = accessor_table.begin();
   1280        it != accessor_table.end();
   1281        ++it) {
   1282     PushOperand(Operand(rsp, 0));  // Duplicate receiver.
   1283     VisitForStackValue(it->first);
   1284     EmitAccessor(it->second->getter);
   1285     EmitAccessor(it->second->setter);
   1286     PushOperand(Smi::FromInt(NONE));
   1287     CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
   1288     PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
   1289   }
   1290 
   1291   if (result_saved) {
   1292     context()->PlugTOS();
   1293   } else {
   1294     context()->Plug(rax);
   1295   }
   1296 }
   1297 
   1298 
   1299 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   1300   Comment cmnt(masm_, "[ ArrayLiteral");
   1301 
   1302   Handle<ConstantElementsPair> constant_elements =
   1303       expr->GetOrBuildConstantElements(isolate());
   1304 
   1305   if (MustCreateArrayLiteralWithRuntime(expr)) {
   1306     __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1307     __ Push(SmiFromSlot(expr->literal_slot()));
   1308     __ Push(constant_elements);
   1309     __ Push(Smi::FromInt(expr->ComputeFlags()));
   1310     __ CallRuntime(Runtime::kCreateArrayLiteral);
   1311   } else {
   1312     __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1313     __ Move(rbx, SmiFromSlot(expr->literal_slot()));
   1314     __ Move(rcx, constant_elements);
   1315     Callable callable =
   1316         CodeFactory::FastCloneShallowArray(isolate(), TRACK_ALLOCATION_SITE);
   1317     __ Call(callable.code(), RelocInfo::CODE_TARGET);
   1318     RestoreContext();
   1319   }
   1320   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
   1321 
   1322   bool result_saved = false;  // Is the result saved to the stack?
   1323   ZoneList<Expression*>* subexprs = expr->values();
   1324   int length = subexprs->length();
   1325 
   1326   // Emit code to evaluate all the non-constant subexpressions and to store
   1327   // them into the newly cloned array.
   1328   for (int array_index = 0; array_index < length; array_index++) {
   1329     Expression* subexpr = subexprs->at(array_index);
   1330     DCHECK(!subexpr->IsSpread());
   1331 
   1332     // If the subexpression is a literal or a simple materialized literal it
   1333     // is already set in the cloned array.
   1334     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
   1335 
   1336     if (!result_saved) {
   1337       PushOperand(rax);  // array literal
   1338       result_saved = true;
   1339     }
   1340     VisitForAccumulatorValue(subexpr);
   1341 
   1342     __ Move(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
   1343     __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
   1344     CallKeyedStoreIC(expr->LiteralFeedbackSlot());
   1345 
   1346     PrepareForBailoutForId(expr->GetIdForElement(array_index),
   1347                            BailoutState::NO_REGISTERS);
   1348   }
   1349 
   1350   if (result_saved) {
   1351     context()->PlugTOS();
   1352   } else {
   1353     context()->Plug(rax);
   1354   }
   1355 }
   1356 
   1357 
   1358 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
   1359   DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
   1360 
   1361   Comment cmnt(masm_, "[ Assignment");
   1362 
   1363   Property* property = expr->target()->AsProperty();
   1364   LhsKind assign_type = Property::GetAssignType(property);
   1365 
   1366   // Evaluate LHS expression.
   1367   switch (assign_type) {
   1368     case VARIABLE:
   1369       // Nothing to do here.
   1370       break;
   1371     case NAMED_PROPERTY:
   1372       if (expr->is_compound()) {
   1373         // We need the receiver both on the stack and in the register.
   1374         VisitForStackValue(property->obj());
   1375         __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
   1376       } else {
   1377         VisitForStackValue(property->obj());
   1378       }
   1379       break;
   1380     case KEYED_PROPERTY: {
   1381       if (expr->is_compound()) {
   1382         VisitForStackValue(property->obj());
   1383         VisitForStackValue(property->key());
   1384         __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
   1385         __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
   1386       } else {
   1387         VisitForStackValue(property->obj());
   1388         VisitForStackValue(property->key());
   1389       }
   1390       break;
   1391     }
   1392     case NAMED_SUPER_PROPERTY:
   1393     case KEYED_SUPER_PROPERTY:
   1394       UNREACHABLE();
   1395       break;
   1396   }
   1397 
   1398   // For compound assignments we need another deoptimization point after the
   1399   // variable/property load.
   1400   if (expr->is_compound()) {
   1401     { AccumulatorValueContext context(this);
   1402       switch (assign_type) {
   1403         case VARIABLE:
   1404           EmitVariableLoad(expr->target()->AsVariableProxy());
   1405           PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
   1406           break;
   1407         case NAMED_PROPERTY:
   1408           EmitNamedPropertyLoad(property);
   1409           PrepareForBailoutForId(property->LoadId(),
   1410                                  BailoutState::TOS_REGISTER);
   1411           break;
   1412         case KEYED_PROPERTY:
   1413           EmitKeyedPropertyLoad(property);
   1414           PrepareForBailoutForId(property->LoadId(),
   1415                                  BailoutState::TOS_REGISTER);
   1416           break;
   1417         case NAMED_SUPER_PROPERTY:
   1418         case KEYED_SUPER_PROPERTY:
   1419           UNREACHABLE();
   1420           break;
   1421       }
   1422     }
   1423 
   1424     Token::Value op = expr->binary_op();
   1425     PushOperand(rax);  // Left operand goes on the stack.
   1426     VisitForAccumulatorValue(expr->value());
   1427 
   1428     AccumulatorValueContext context(this);
   1429     if (ShouldInlineSmiCase(op)) {
   1430       EmitInlineSmiBinaryOp(expr->binary_operation(),
   1431                             op,
   1432                             expr->target(),
   1433                             expr->value());
   1434     } else {
   1435       EmitBinaryOp(expr->binary_operation(), op);
   1436     }
   1437     // Deoptimization point in case the binary operation may have side effects.
   1438     PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
   1439   } else {
   1440     VisitForAccumulatorValue(expr->value());
   1441   }
   1442 
   1443   SetExpressionPosition(expr);
   1444 
   1445   // Store the value.
   1446   switch (assign_type) {
   1447     case VARIABLE: {
   1448       VariableProxy* proxy = expr->target()->AsVariableProxy();
   1449       EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
   1450                              proxy->hole_check_mode());
   1451       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   1452       context()->Plug(rax);
   1453       break;
   1454     }
   1455     case NAMED_PROPERTY:
   1456       EmitNamedPropertyAssignment(expr);
   1457       break;
   1458     case KEYED_PROPERTY:
   1459       EmitKeyedPropertyAssignment(expr);
   1460       break;
   1461     case NAMED_SUPER_PROPERTY:
   1462     case KEYED_SUPER_PROPERTY:
   1463       UNREACHABLE();
   1464       break;
   1465   }
   1466 }
   1467 
   1468 
   1469 void FullCodeGenerator::VisitYield(Yield* expr) {
   1470   // Resumable functions are not supported.
   1471   UNREACHABLE();
   1472 }
   1473 
   1474 void FullCodeGenerator::PushOperand(MemOperand operand) {
   1475   OperandStackDepthIncrement(1);
   1476   __ Push(operand);
   1477 }
   1478 
   1479 void FullCodeGenerator::EmitOperandStackDepthCheck() {
   1480   if (FLAG_debug_code) {
   1481     int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
   1482                         operand_stack_depth_ * kPointerSize;
   1483     __ movp(rax, rbp);
   1484     __ subp(rax, rsp);
   1485     __ cmpp(rax, Immediate(expected_diff));
   1486     __ Assert(equal, kUnexpectedStackDepth);
   1487   }
   1488 }
   1489 
   1490 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
   1491   Label allocate, done_allocate;
   1492 
   1493   __ Allocate(JSIteratorResult::kSize, rax, rcx, rdx, &allocate,
   1494               NO_ALLOCATION_FLAGS);
   1495   __ jmp(&done_allocate, Label::kNear);
   1496 
   1497   __ bind(&allocate);
   1498   __ Push(Smi::FromInt(JSIteratorResult::kSize));
   1499   __ CallRuntime(Runtime::kAllocateInNewSpace);
   1500 
   1501   __ bind(&done_allocate);
   1502   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, rbx);
   1503   __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
   1504   __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
   1505   __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
   1506   __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
   1507   __ Pop(FieldOperand(rax, JSIteratorResult::kValueOffset));
   1508   __ LoadRoot(FieldOperand(rax, JSIteratorResult::kDoneOffset),
   1509               done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
   1510   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
   1511   OperandStackDepthDecrement(1);
   1512 }
   1513 
   1514 
   1515 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
   1516                                               Token::Value op,
   1517                                               Expression* left,
   1518                                               Expression* right) {
   1519   // Do combined smi check of the operands. Left operand is on the
   1520   // stack (popped into rdx). Right operand is in rax but moved into
   1521   // rcx to make the shifts easier.
   1522   Label done, stub_call, smi_case;
   1523   PopOperand(rdx);
   1524   __ movp(rcx, rax);
   1525   __ orp(rax, rdx);
   1526   JumpPatchSite patch_site(masm_);
   1527   patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
   1528 
   1529   __ bind(&stub_call);
   1530   __ movp(rax, rcx);
   1531   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
   1532   CallIC(code, expr->BinaryOperationFeedbackId());
   1533   patch_site.EmitPatchInfo();
   1534   __ jmp(&done, Label::kNear);
   1535 
   1536   __ bind(&smi_case);
   1537   switch (op) {
   1538     case Token::SAR:
   1539       __ SmiShiftArithmeticRight(rax, rdx, rcx);
   1540       break;
   1541     case Token::SHL:
   1542       __ SmiShiftLeft(rax, rdx, rcx, &stub_call);
   1543       break;
   1544     case Token::SHR:
   1545       __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
   1546       break;
   1547     case Token::ADD:
   1548       __ SmiAdd(rax, rdx, rcx, &stub_call);
   1549       break;
   1550     case Token::SUB:
   1551       __ SmiSub(rax, rdx, rcx, &stub_call);
   1552       break;
   1553     case Token::MUL:
   1554       __ SmiMul(rax, rdx, rcx, &stub_call);
   1555       break;
   1556     case Token::BIT_OR:
   1557       __ SmiOr(rax, rdx, rcx);
   1558       break;
   1559     case Token::BIT_AND:
   1560       __ SmiAnd(rax, rdx, rcx);
   1561       break;
   1562     case Token::BIT_XOR:
   1563       __ SmiXor(rax, rdx, rcx);
   1564       break;
   1565     default:
   1566       UNREACHABLE();
   1567       break;
   1568   }
   1569 
   1570   __ bind(&done);
   1571   context()->Plug(rax);
   1572 }
   1573 
   1574 
   1575 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
   1576   PopOperand(rdx);
   1577   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
   1578   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
   1579   CallIC(code, expr->BinaryOperationFeedbackId());
   1580   patch_site.EmitPatchInfo();
   1581   context()->Plug(rax);
   1582 }
   1583 
   1584 void FullCodeGenerator::EmitAssignment(Expression* expr, FeedbackSlot slot) {
   1585   DCHECK(expr->IsValidReferenceExpressionOrThis());
   1586 
   1587   Property* prop = expr->AsProperty();
   1588   LhsKind assign_type = Property::GetAssignType(prop);
   1589 
   1590   switch (assign_type) {
   1591     case VARIABLE: {
   1592       VariableProxy* proxy = expr->AsVariableProxy();
   1593       EffectContext context(this);
   1594       EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
   1595                              proxy->hole_check_mode());
   1596       break;
   1597     }
   1598     case NAMED_PROPERTY: {
   1599       PushOperand(rax);  // Preserve value.
   1600       VisitForAccumulatorValue(prop->obj());
   1601       __ Move(StoreDescriptor::ReceiverRegister(), rax);
   1602       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
   1603       CallStoreIC(slot, prop->key()->AsLiteral()->value());
   1604       break;
   1605     }
   1606     case KEYED_PROPERTY: {
   1607       PushOperand(rax);  // Preserve value.
   1608       VisitForStackValue(prop->obj());
   1609       VisitForAccumulatorValue(prop->key());
   1610       __ Move(StoreDescriptor::NameRegister(), rax);
   1611       PopOperand(StoreDescriptor::ReceiverRegister());
   1612       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
   1613       CallKeyedStoreIC(slot);
   1614       break;
   1615     }
   1616     case NAMED_SUPER_PROPERTY:
   1617     case KEYED_SUPER_PROPERTY:
   1618       UNREACHABLE();
   1619       break;
   1620   }
   1621   context()->Plug(rax);
   1622 }
   1623 
   1624 
   1625 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
   1626     Variable* var, MemOperand location) {
   1627   __ movp(location, rax);
   1628   if (var->IsContextSlot()) {
   1629     __ movp(rdx, rax);
   1630     __ RecordWriteContextSlot(
   1631         rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
   1632   }
   1633 }
   1634 
   1635 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
   1636                                                FeedbackSlot slot,
   1637                                                HoleCheckMode hole_check_mode) {
   1638   if (var->IsUnallocated()) {
   1639     // Global var, const, or let.
   1640     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
   1641     CallStoreIC(slot, var->name());
   1642 
   1643   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
   1644     DCHECK(!var->IsLookupSlot());
   1645     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
   1646     MemOperand location = VarOperand(var, rcx);
   1647     // Perform an initialization check for lexically declared variables.
   1648     if (hole_check_mode == HoleCheckMode::kRequired) {
   1649       Label assign;
   1650       __ movp(rdx, location);
   1651       __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
   1652       __ j(not_equal, &assign, Label::kNear);
   1653       __ Push(var->name());
   1654       __ CallRuntime(Runtime::kThrowReferenceError);
   1655       __ bind(&assign);
   1656     }
   1657     if (var->mode() != CONST) {
   1658       EmitStoreToStackLocalOrContextSlot(var, location);
   1659     } else if (var->throw_on_const_assignment(language_mode())) {
   1660       __ CallRuntime(Runtime::kThrowConstAssignError);
   1661     }
   1662 
   1663   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
   1664     // Initializing assignment to const {this} needs a write barrier.
   1665     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
   1666     Label uninitialized_this;
   1667     MemOperand location = VarOperand(var, rcx);
   1668     __ movp(rdx, location);
   1669     __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
   1670     __ j(equal, &uninitialized_this);
   1671     __ Push(var->name());
   1672     __ CallRuntime(Runtime::kThrowReferenceError);
   1673     __ bind(&uninitialized_this);
   1674     EmitStoreToStackLocalOrContextSlot(var, location);
   1675 
   1676   } else {
   1677     DCHECK(var->mode() != CONST || op == Token::INIT);
   1678     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
   1679     DCHECK(!var->IsLookupSlot());
   1680     // Assignment to var or initializing assignment to let/const in harmony
   1681     // mode.
   1682     MemOperand location = VarOperand(var, rcx);
   1683     if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
   1684       // Check for an uninitialized let binding.
   1685       __ movp(rdx, location);
   1686       __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
   1687       __ Check(equal, kLetBindingReInitialization);
   1688     }
   1689     EmitStoreToStackLocalOrContextSlot(var, location);
   1690   }
   1691 }
   1692 
   1693 
   1694 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
   1695   // Assignment to a property, using a named store IC.
   1696   Property* prop = expr->target()->AsProperty();
   1697   DCHECK(prop != NULL);
   1698   DCHECK(prop->key()->IsLiteral());
   1699 
   1700   PopOperand(StoreDescriptor::ReceiverRegister());
   1701   CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
   1702 
   1703   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   1704   context()->Plug(rax);
   1705 }
   1706 
   1707 
   1708 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   1709   // Assignment to a property, using a keyed store IC.
   1710   PopOperand(StoreDescriptor::NameRegister());  // Key.
   1711   PopOperand(StoreDescriptor::ReceiverRegister());
   1712   DCHECK(StoreDescriptor::ValueRegister().is(rax));
   1713   CallKeyedStoreIC(expr->AssignmentSlot());
   1714 
   1715   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   1716   context()->Plug(rax);
   1717 }
   1718 
   1719 // Code common for calls using the IC.
   1720 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
   1721   Expression* callee = expr->expression();
   1722 
   1723   // Get the target function.
   1724   ConvertReceiverMode convert_mode;
   1725   if (callee->IsVariableProxy()) {
   1726     { StackValueContext context(this);
   1727       EmitVariableLoad(callee->AsVariableProxy());
   1728       PrepareForBailout(callee, BailoutState::NO_REGISTERS);
   1729     }
   1730     // Push undefined as receiver. This is patched in the Call builtin if it
   1731     // is a sloppy mode method.
   1732     PushOperand(isolate()->factory()->undefined_value());
   1733     convert_mode = ConvertReceiverMode::kNullOrUndefined;
   1734   } else {
   1735     // Load the function from the receiver.
   1736     DCHECK(callee->IsProperty());
   1737     DCHECK(!callee->AsProperty()->IsSuperAccess());
   1738     __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
   1739     EmitNamedPropertyLoad(callee->AsProperty());
   1740     PrepareForBailoutForId(callee->AsProperty()->LoadId(),
   1741                            BailoutState::TOS_REGISTER);
   1742     // Push the target function under the receiver.
   1743     PushOperand(Operand(rsp, 0));
   1744     __ movp(Operand(rsp, kPointerSize), rax);
   1745     convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
   1746   }
   1747 
   1748   EmitCall(expr, convert_mode);
   1749 }
   1750 
   1751 
   1752 // Common code for calls using the IC.
   1753 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
   1754                                                 Expression* key) {
   1755   // Load the key.
   1756   VisitForAccumulatorValue(key);
   1757 
   1758   Expression* callee = expr->expression();
   1759 
   1760   // Load the function from the receiver.
   1761   DCHECK(callee->IsProperty());
   1762   __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
   1763   __ Move(LoadDescriptor::NameRegister(), rax);
   1764   EmitKeyedPropertyLoad(callee->AsProperty());
   1765   PrepareForBailoutForId(callee->AsProperty()->LoadId(),
   1766                          BailoutState::TOS_REGISTER);
   1767 
   1768   // Push the target function under the receiver.
   1769   PushOperand(Operand(rsp, 0));
   1770   __ movp(Operand(rsp, kPointerSize), rax);
   1771 
   1772   EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
   1773 }
   1774 
   1775 
   1776 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
   1777   // Load the arguments.
   1778   ZoneList<Expression*>* args = expr->arguments();
   1779   int arg_count = args->length();
   1780   for (int i = 0; i < arg_count; i++) {
   1781     VisitForStackValue(args->at(i));
   1782   }
   1783 
   1784   PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
   1785   SetCallPosition(expr, expr->tail_call_mode());
   1786   if (expr->tail_call_mode() == TailCallMode::kAllow) {
   1787     if (FLAG_trace) {
   1788       __ CallRuntime(Runtime::kTraceTailCall);
   1789     }
   1790     // Update profiling counters before the tail call since we will
   1791     // not return to this function.
   1792     EmitProfilingCounterHandlingForReturnSequence(true);
   1793   }
   1794   Handle<Code> code =
   1795       CodeFactory::CallICTrampoline(isolate(), mode, expr->tail_call_mode())
   1796           .code();
   1797   __ Set(rdx, IntFromSlot(expr->CallFeedbackICSlot()));
   1798   __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
   1799   __ Set(rax, arg_count);
   1800   CallIC(code);
   1801   OperandStackDepthDecrement(arg_count + 1);
   1802 
   1803   RecordJSReturnSite(expr);
   1804   RestoreContext();
   1805   // Discard the function left on TOS.
   1806   context()->DropAndPlug(1, rax);
   1807 }
   1808 
   1809 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   1810   Comment cmnt(masm_, "[ CallNew");
   1811   // According to ECMA-262, section 11.2.2, page 44, the function
   1812   // expression in new calls must be evaluated before the
   1813   // arguments.
   1814 
   1815   // Push constructor on the stack.  If it's not a function it's used as
   1816   // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
   1817   // ignored.
   1818   DCHECK(!expr->expression()->IsSuperPropertyReference());
   1819   VisitForStackValue(expr->expression());
   1820 
   1821   // Push the arguments ("left-to-right") on the stack.
   1822   ZoneList<Expression*>* args = expr->arguments();
   1823   int arg_count = args->length();
   1824   for (int i = 0; i < arg_count; i++) {
   1825     VisitForStackValue(args->at(i));
   1826   }
   1827 
   1828   // Call the construct call builtin that handles allocation and
   1829   // constructor invocation.
   1830   SetConstructCallPosition(expr);
   1831 
   1832   // Load function and argument count into rdi and rax.
   1833   __ Set(rax, arg_count);
   1834   __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
   1835 
   1836   // Record call targets in unoptimized code, but not in the snapshot.
   1837   __ EmitLoadFeedbackVector(rbx);
   1838   __ Move(rdx, SmiFromSlot(expr->CallNewFeedbackSlot()));
   1839 
   1840   CallConstructStub stub(isolate());
   1841   CallIC(stub.GetCode());
   1842   OperandStackDepthDecrement(arg_count + 1);
   1843   PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
   1844   RestoreContext();
   1845   context()->Plug(rax);
   1846 }
   1847 
   1848 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
   1849   ZoneList<Expression*>* args = expr->arguments();
   1850   DCHECK(args->length() == 1);
   1851 
   1852   VisitForAccumulatorValue(args->at(0));
   1853 
   1854   Label materialize_true, materialize_false;
   1855   Label* if_true = NULL;
   1856   Label* if_false = NULL;
   1857   Label* fall_through = NULL;
   1858   context()->PrepareTest(&materialize_true, &materialize_false,
   1859                          &if_true, &if_false, &fall_through);
   1860 
   1861   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   1862   __ JumpIfSmi(rax, if_true);
   1863   __ jmp(if_false);
   1864 
   1865   context()->Plug(if_true, if_false);
   1866 }
   1867 
   1868 
   1869 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
   1870   ZoneList<Expression*>* args = expr->arguments();
   1871   DCHECK(args->length() == 1);
   1872 
   1873   VisitForAccumulatorValue(args->at(0));
   1874 
   1875   Label materialize_true, materialize_false;
   1876   Label* if_true = NULL;
   1877   Label* if_false = NULL;
   1878   Label* fall_through = NULL;
   1879   context()->PrepareTest(&materialize_true, &materialize_false,
   1880                          &if_true, &if_false, &fall_through);
   1881 
   1882   __ JumpIfSmi(rax, if_false);
   1883   __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rbx);
   1884   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   1885   Split(above_equal, if_true, if_false, fall_through);
   1886 
   1887   context()->Plug(if_true, if_false);
   1888 }
   1889 
   1890 
   1891 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
   1892   ZoneList<Expression*>* args = expr->arguments();
   1893   DCHECK(args->length() == 1);
   1894 
   1895   VisitForAccumulatorValue(args->at(0));
   1896 
   1897   Label materialize_true, materialize_false;
   1898   Label* if_true = NULL;
   1899   Label* if_false = NULL;
   1900   Label* fall_through = NULL;
   1901   context()->PrepareTest(&materialize_true, &materialize_false,
   1902                          &if_true, &if_false, &fall_through);
   1903 
   1904   __ JumpIfSmi(rax, if_false);
   1905   __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
   1906   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   1907   Split(equal, if_true, if_false, fall_through);
   1908 
   1909   context()->Plug(if_true, if_false);
   1910 }
   1911 
   1912 
   1913 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
   1914   ZoneList<Expression*>* args = expr->arguments();
   1915   DCHECK(args->length() == 1);
   1916 
   1917   VisitForAccumulatorValue(args->at(0));
   1918 
   1919   Label materialize_true, materialize_false;
   1920   Label* if_true = NULL;
   1921   Label* if_false = NULL;
   1922   Label* fall_through = NULL;
   1923   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
   1924                          &if_false, &fall_through);
   1925 
   1926   __ JumpIfSmi(rax, if_false);
   1927   __ CmpObjectType(rax, JS_TYPED_ARRAY_TYPE, rbx);
   1928   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   1929   Split(equal, if_true, if_false, fall_through);
   1930 
   1931   context()->Plug(if_true, if_false);
   1932 }
   1933 
   1934 
   1935 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
   1936   ZoneList<Expression*>* args = expr->arguments();
   1937   DCHECK(args->length() == 1);
   1938 
   1939   VisitForAccumulatorValue(args->at(0));
   1940 
   1941   Label materialize_true, materialize_false;
   1942   Label* if_true = NULL;
   1943   Label* if_false = NULL;
   1944   Label* fall_through = NULL;
   1945   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
   1946                          &if_false, &fall_through);
   1947 
   1948 
   1949   __ JumpIfSmi(rax, if_false);
   1950   __ CmpObjectType(rax, JS_PROXY_TYPE, rbx);
   1951   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   1952   Split(equal, if_true, if_false, fall_through);
   1953 
   1954   context()->Plug(if_true, if_false);
   1955 }
   1956 
   1957 
   1958 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
   1959   ZoneList<Expression*>* args = expr->arguments();
   1960   DCHECK(args->length() == 1);
   1961   Label done, null, function, non_function_constructor;
   1962 
   1963   VisitForAccumulatorValue(args->at(0));
   1964 
   1965   // If the object is not a JSReceiver, we return null.
   1966   __ JumpIfSmi(rax, &null, Label::kNear);
   1967   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
   1968   __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rax);
   1969   __ j(below, &null, Label::kNear);
   1970 
   1971   // Return 'Function' for JSFunction and JSBoundFunction objects.
   1972   __ CmpInstanceType(rax, FIRST_FUNCTION_TYPE);
   1973   STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
   1974   __ j(above_equal, &function, Label::kNear);
   1975 
   1976   // Check if the constructor in the map is a JS function.
   1977   __ GetMapConstructor(rax, rax, rbx);
   1978   __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
   1979   __ j(not_equal, &non_function_constructor, Label::kNear);
   1980 
   1981   // rax now contains the constructor function. Grab the
   1982   // instance class name from there.
   1983   __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
   1984   __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
   1985   __ jmp(&done, Label::kNear);
   1986 
   1987   // Non-JS objects have class null.
   1988   __ bind(&null);
   1989   __ LoadRoot(rax, Heap::kNullValueRootIndex);
   1990   __ jmp(&done, Label::kNear);
   1991 
   1992   // Functions have class 'Function'.
   1993   __ bind(&function);
   1994   __ LoadRoot(rax, Heap::kFunction_stringRootIndex);
   1995   __ jmp(&done, Label::kNear);
   1996 
   1997   // Objects with a non-function constructor have class 'Object'.
   1998   __ bind(&non_function_constructor);
   1999   __ LoadRoot(rax, Heap::kObject_stringRootIndex);
   2000 
   2001   // All done.
   2002   __ bind(&done);
   2003 
   2004   context()->Plug(rax);
   2005 }
   2006 
   2007 
   2008 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   2009   ZoneList<Expression*>* args = expr->arguments();
   2010   DCHECK(args->length() == 2);
   2011 
   2012   VisitForStackValue(args->at(0));
   2013   VisitForAccumulatorValue(args->at(1));
   2014 
   2015   Register object = rbx;
   2016   Register index = rax;
   2017   Register result = rdx;
   2018 
   2019   PopOperand(object);
   2020 
   2021   Label need_conversion;
   2022   Label index_out_of_range;
   2023   Label done;
   2024   StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
   2025                                       &need_conversion, &index_out_of_range);
   2026   generator.GenerateFast(masm_);
   2027   __ jmp(&done);
   2028 
   2029   __ bind(&index_out_of_range);
   2030   // When the index is out of range, the spec requires us to return
   2031   // NaN.
   2032   __ LoadRoot(result, Heap::kNanValueRootIndex);
   2033   __ jmp(&done);
   2034 
   2035   __ bind(&need_conversion);
   2036   // Move the undefined value into the result register, which will
   2037   // trigger conversion.
   2038   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
   2039   __ jmp(&done);
   2040 
   2041   NopRuntimeCallHelper call_helper;
   2042   generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
   2043 
   2044   __ bind(&done);
   2045   context()->Plug(result);
   2046 }
   2047 
   2048 
   2049 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
   2050   ZoneList<Expression*>* args = expr->arguments();
   2051   DCHECK_LE(2, args->length());
   2052   // Push target, receiver and arguments onto the stack.
   2053   for (Expression* const arg : *args) {
   2054     VisitForStackValue(arg);
   2055   }
   2056   PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
   2057   // Move target to rdi.
   2058   int const argc = args->length() - 2;
   2059   __ movp(rdi, Operand(rsp, (argc + 1) * kPointerSize));
   2060   // Call the target.
   2061   __ Set(rax, argc);
   2062   __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   2063   OperandStackDepthDecrement(argc + 1);
   2064   RestoreContext();
   2065   // Discard the function left on TOS.
   2066   context()->DropAndPlug(1, rax);
   2067 }
   2068 
   2069 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
   2070   ZoneList<Expression*>* args = expr->arguments();
   2071   DCHECK_EQ(1, args->length());
   2072   VisitForAccumulatorValue(args->at(0));
   2073   __ AssertFunction(rax);
   2074   __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
   2075   __ movp(rax, FieldOperand(rax, Map::kPrototypeOffset));
   2076   context()->Plug(rax);
   2077 }
   2078 
   2079 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
   2080   DCHECK(expr->arguments()->length() == 0);
   2081   ExternalReference debug_is_active =
   2082       ExternalReference::debug_is_active_address(isolate());
   2083   __ Move(kScratchRegister, debug_is_active);
   2084   __ movzxbp(rax, Operand(kScratchRegister, 0));
   2085   __ Integer32ToSmi(rax, rax);
   2086   context()->Plug(rax);
   2087 }
   2088 
   2089 
   2090 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
   2091   ZoneList<Expression*>* args = expr->arguments();
   2092   DCHECK_EQ(2, args->length());
   2093   VisitForStackValue(args->at(0));
   2094   VisitForStackValue(args->at(1));
   2095 
   2096   Label runtime, done;
   2097 
   2098   __ Allocate(JSIteratorResult::kSize, rax, rcx, rdx, &runtime,
   2099               NO_ALLOCATION_FLAGS);
   2100   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, rbx);
   2101   __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
   2102   __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
   2103   __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
   2104   __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
   2105   __ Pop(FieldOperand(rax, JSIteratorResult::kDoneOffset));
   2106   __ Pop(FieldOperand(rax, JSIteratorResult::kValueOffset));
   2107   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
   2108   __ jmp(&done, Label::kNear);
   2109 
   2110   __ bind(&runtime);
   2111   CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
   2112 
   2113   __ bind(&done);
   2114   context()->Plug(rax);
   2115 }
   2116 
   2117 
   2118 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
   2119   // Push function.
   2120   __ LoadNativeContextSlot(expr->context_index(), rax);
   2121   PushOperand(rax);
   2122 
   2123   // Push undefined as receiver.
   2124   OperandStackDepthIncrement(1);
   2125   __ PushRoot(Heap::kUndefinedValueRootIndex);
   2126 }
   2127 
   2128 
   2129 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
   2130   ZoneList<Expression*>* args = expr->arguments();
   2131   int arg_count = args->length();
   2132 
   2133   SetCallPosition(expr);
   2134   __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
   2135   __ Set(rax, arg_count);
   2136   __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
   2137           RelocInfo::CODE_TARGET);
   2138   OperandStackDepthDecrement(arg_count + 1);
   2139   RestoreContext();
   2140 }
   2141 
   2142 
   2143 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
   2144   switch (expr->op()) {
   2145     case Token::DELETE: {
   2146       Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
   2147       Property* property = expr->expression()->AsProperty();
   2148       VariableProxy* proxy = expr->expression()->AsVariableProxy();
   2149 
   2150       if (property != NULL) {
   2151         VisitForStackValue(property->obj());
   2152         VisitForStackValue(property->key());
   2153         CallRuntimeWithOperands(is_strict(language_mode())
   2154                                     ? Runtime::kDeleteProperty_Strict
   2155                                     : Runtime::kDeleteProperty_Sloppy);
   2156         context()->Plug(rax);
   2157       } else if (proxy != NULL) {
   2158         Variable* var = proxy->var();
   2159         // Delete of an unqualified identifier is disallowed in strict mode but
   2160         // "delete this" is allowed.
   2161         bool is_this = var->is_this();
   2162         DCHECK(is_sloppy(language_mode()) || is_this);
   2163         if (var->IsUnallocated()) {
   2164           __ movp(rax, NativeContextOperand());
   2165           __ Push(ContextOperand(rax, Context::EXTENSION_INDEX));
   2166           __ Push(var->name());
   2167           __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
   2168           context()->Plug(rax);
   2169         } else {
   2170           DCHECK(!var->IsLookupSlot());
   2171           DCHECK(var->IsStackAllocated() || var->IsContextSlot());
   2172           // Result of deleting non-global variables is false.  'this' is
   2173           // not really a variable, though we implement it as one.  The
   2174           // subexpression does not have side effects.
   2175           context()->Plug(is_this);
   2176         }
   2177       } else {
   2178         // Result of deleting non-property, non-variable reference is true.
   2179         // The subexpression may have side effects.
   2180         VisitForEffect(expr->expression());
   2181         context()->Plug(true);
   2182       }
   2183       break;
   2184     }
   2185 
   2186     case Token::VOID: {
   2187       Comment cmnt(masm_, "[ UnaryOperation (VOID)");
   2188       VisitForEffect(expr->expression());
   2189       context()->Plug(Heap::kUndefinedValueRootIndex);
   2190       break;
   2191     }
   2192 
   2193     case Token::NOT: {
   2194       Comment cmnt(masm_, "[ UnaryOperation (NOT)");
   2195       if (context()->IsEffect()) {
   2196         // Unary NOT has no side effects so it's only necessary to visit the
   2197         // subexpression.  Match the optimizing compiler by not branching.
   2198         VisitForEffect(expr->expression());
   2199       } else if (context()->IsTest()) {
   2200         const TestContext* test = TestContext::cast(context());
   2201         // The labels are swapped for the recursive call.
   2202         VisitForControl(expr->expression(),
   2203                         test->false_label(),
   2204                         test->true_label(),
   2205                         test->fall_through());
   2206         context()->Plug(test->true_label(), test->false_label());
   2207       } else {
   2208         // We handle value contexts explicitly rather than simply visiting
   2209         // for control and plugging the control flow into the context,
   2210         // because we need to prepare a pair of extra administrative AST ids
   2211         // for the optimizing compiler.
   2212         DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
   2213         Label materialize_true, materialize_false, done;
   2214         VisitForControl(expr->expression(),
   2215                         &materialize_false,
   2216                         &materialize_true,
   2217                         &materialize_true);
   2218         if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
   2219         __ bind(&materialize_true);
   2220         PrepareForBailoutForId(expr->MaterializeTrueId(),
   2221                                BailoutState::NO_REGISTERS);
   2222         if (context()->IsAccumulatorValue()) {
   2223           __ LoadRoot(rax, Heap::kTrueValueRootIndex);
   2224         } else {
   2225           __ PushRoot(Heap::kTrueValueRootIndex);
   2226         }
   2227         __ jmp(&done, Label::kNear);
   2228         __ bind(&materialize_false);
   2229         PrepareForBailoutForId(expr->MaterializeFalseId(),
   2230                                BailoutState::NO_REGISTERS);
   2231         if (context()->IsAccumulatorValue()) {
   2232           __ LoadRoot(rax, Heap::kFalseValueRootIndex);
   2233         } else {
   2234           __ PushRoot(Heap::kFalseValueRootIndex);
   2235         }
   2236         __ bind(&done);
   2237       }
   2238       break;
   2239     }
   2240 
   2241     case Token::TYPEOF: {
   2242       Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
   2243       {
   2244         AccumulatorValueContext context(this);
   2245         VisitForTypeofValue(expr->expression());
   2246       }
   2247       __ movp(rbx, rax);
   2248       __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
   2249       context()->Plug(rax);
   2250       break;
   2251     }
   2252 
   2253     default:
   2254       UNREACHABLE();
   2255   }
   2256 }
   2257 
   2258 
   2259 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
   2260   DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
   2261 
   2262   Comment cmnt(masm_, "[ CountOperation");
   2263 
   2264   Property* prop = expr->expression()->AsProperty();
   2265   LhsKind assign_type = Property::GetAssignType(prop);
   2266 
   2267   // Evaluate expression and get value.
   2268   if (assign_type == VARIABLE) {
   2269     DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
   2270     AccumulatorValueContext context(this);
   2271     EmitVariableLoad(expr->expression()->AsVariableProxy());
   2272   } else {
   2273     // Reserve space for result of postfix operation.
   2274     if (expr->is_postfix() && !context()->IsEffect()) {
   2275       PushOperand(Smi::kZero);
   2276     }
   2277     switch (assign_type) {
   2278       case NAMED_PROPERTY: {
   2279         VisitForStackValue(prop->obj());
   2280         __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
   2281         EmitNamedPropertyLoad(prop);
   2282         break;
   2283       }
   2284 
   2285       case KEYED_PROPERTY: {
   2286         VisitForStackValue(prop->obj());
   2287         VisitForStackValue(prop->key());
   2288         // Leave receiver on stack
   2289         __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
   2290         // Copy of key, needed for later store.
   2291         __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
   2292         EmitKeyedPropertyLoad(prop);
   2293         break;
   2294       }
   2295 
   2296       case NAMED_SUPER_PROPERTY:
   2297       case KEYED_SUPER_PROPERTY:
   2298       case VARIABLE:
   2299         UNREACHABLE();
   2300     }
   2301   }
   2302 
   2303   // We need a second deoptimization point after loading the value
   2304   // in case evaluating the property load my have a side effect.
   2305   if (assign_type == VARIABLE) {
   2306     PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
   2307   } else {
   2308     PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
   2309   }
   2310 
   2311   // Inline smi case if we are in a loop.
   2312   Label done, stub_call;
   2313   JumpPatchSite patch_site(masm_);
   2314   if (ShouldInlineSmiCase(expr->op())) {
   2315     Label slow;
   2316     patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);
   2317 
   2318     // Save result for postfix expressions.
   2319     if (expr->is_postfix()) {
   2320       if (!context()->IsEffect()) {
   2321         // Save the result on the stack. If we have a named or keyed property
   2322         // we store the result under the receiver that is currently on top
   2323         // of the stack.
   2324         switch (assign_type) {
   2325           case VARIABLE:
   2326             __ Push(rax);
   2327             break;
   2328           case NAMED_PROPERTY:
   2329             __ movp(Operand(rsp, kPointerSize), rax);
   2330             break;
   2331           case KEYED_PROPERTY:
   2332             __ movp(Operand(rsp, 2 * kPointerSize), rax);
   2333             break;
   2334           case NAMED_SUPER_PROPERTY:
   2335           case KEYED_SUPER_PROPERTY:
   2336             UNREACHABLE();
   2337             break;
   2338         }
   2339       }
   2340     }
   2341 
   2342     SmiOperationConstraints constraints =
   2343         SmiOperationConstraint::kPreserveSourceRegister |
   2344         SmiOperationConstraint::kBailoutOnNoOverflow;
   2345     if (expr->op() == Token::INC) {
   2346       __ SmiAddConstant(rax, rax, Smi::FromInt(1), constraints, &done,
   2347                         Label::kNear);
   2348     } else {
   2349       __ SmiSubConstant(rax, rax, Smi::FromInt(1), constraints, &done,
   2350                         Label::kNear);
   2351     }
   2352     __ jmp(&stub_call, Label::kNear);
   2353     __ bind(&slow);
   2354   }
   2355 
   2356   // Convert old value into a number.
   2357   __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
   2358   RestoreContext();
   2359   PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
   2360 
   2361   // Save result for postfix expressions.
   2362   if (expr->is_postfix()) {
   2363     if (!context()->IsEffect()) {
   2364       // Save the result on the stack. If we have a named or keyed property
   2365       // we store the result under the receiver that is currently on top
   2366       // of the stack.
   2367       switch (assign_type) {
   2368         case VARIABLE:
   2369           PushOperand(rax);
   2370           break;
   2371         case NAMED_PROPERTY:
   2372           __ movp(Operand(rsp, kPointerSize), rax);
   2373           break;
   2374         case KEYED_PROPERTY:
   2375           __ movp(Operand(rsp, 2 * kPointerSize), rax);
   2376           break;
   2377         case NAMED_SUPER_PROPERTY:
   2378         case KEYED_SUPER_PROPERTY:
   2379           UNREACHABLE();
   2380           break;
   2381       }
   2382     }
   2383   }
   2384 
   2385   SetExpressionPosition(expr);
   2386 
   2387   // Call stub for +1/-1.
   2388   __ bind(&stub_call);
   2389   __ movp(rdx, rax);
   2390   __ Move(rax, Smi::FromInt(1));
   2391   Handle<Code> code =
   2392       CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
   2393   CallIC(code, expr->CountBinOpFeedbackId());
   2394   patch_site.EmitPatchInfo();
   2395   __ bind(&done);
   2396 
   2397   // Store the value returned in rax.
   2398   switch (assign_type) {
   2399     case VARIABLE: {
   2400       VariableProxy* proxy = expr->expression()->AsVariableProxy();
   2401       if (expr->is_postfix()) {
   2402         // Perform the assignment as if via '='.
   2403         { EffectContext context(this);
   2404           EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
   2405                                  proxy->hole_check_mode());
   2406           PrepareForBailoutForId(expr->AssignmentId(),
   2407                                  BailoutState::TOS_REGISTER);
   2408           context.Plug(rax);
   2409         }
   2410         // For all contexts except kEffect: We have the result on
   2411         // top of the stack.
   2412         if (!context()->IsEffect()) {
   2413           context()->PlugTOS();
   2414         }
   2415       } else {
   2416         // Perform the assignment as if via '='.
   2417         EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
   2418                                proxy->hole_check_mode());
   2419         PrepareForBailoutForId(expr->AssignmentId(),
   2420                                BailoutState::TOS_REGISTER);
   2421         context()->Plug(rax);
   2422       }
   2423       break;
   2424     }
   2425     case NAMED_PROPERTY: {
   2426       PopOperand(StoreDescriptor::ReceiverRegister());
   2427       CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
   2428       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   2429       if (expr->is_postfix()) {
   2430         if (!context()->IsEffect()) {
   2431           context()->PlugTOS();
   2432         }
   2433       } else {
   2434         context()->Plug(rax);
   2435       }
   2436       break;
   2437     }
   2438     case KEYED_PROPERTY: {
   2439       PopOperand(StoreDescriptor::NameRegister());
   2440       PopOperand(StoreDescriptor::ReceiverRegister());
   2441       CallKeyedStoreIC(expr->CountSlot());
   2442       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   2443       if (expr->is_postfix()) {
   2444         if (!context()->IsEffect()) {
   2445           context()->PlugTOS();
   2446         }
   2447       } else {
   2448         context()->Plug(rax);
   2449       }
   2450       break;
   2451     }
   2452     case NAMED_SUPER_PROPERTY:
   2453     case KEYED_SUPER_PROPERTY:
   2454       UNREACHABLE();
   2455       break;
   2456   }
   2457 }
   2458 
   2459 
   2460 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
   2461                                                  Expression* sub_expr,
   2462                                                  Handle<String> check) {
   2463   Label materialize_true, materialize_false;
   2464   Label* if_true = NULL;
   2465   Label* if_false = NULL;
   2466   Label* fall_through = NULL;
   2467   context()->PrepareTest(&materialize_true, &materialize_false,
   2468                          &if_true, &if_false, &fall_through);
   2469 
   2470   { AccumulatorValueContext context(this);
   2471     VisitForTypeofValue(sub_expr);
   2472   }
   2473   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2474 
   2475   Factory* factory = isolate()->factory();
   2476   if (String::Equals(check, factory->number_string())) {
   2477     __ JumpIfSmi(rax, if_true);
   2478     __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
   2479     __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
   2480     Split(equal, if_true, if_false, fall_through);
   2481   } else if (String::Equals(check, factory->string_string())) {
   2482     __ JumpIfSmi(rax, if_false);
   2483     __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
   2484     Split(below, if_true, if_false, fall_through);
   2485   } else if (String::Equals(check, factory->symbol_string())) {
   2486     __ JumpIfSmi(rax, if_false);
   2487     __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
   2488     Split(equal, if_true, if_false, fall_through);
   2489   } else if (String::Equals(check, factory->boolean_string())) {
   2490     __ CompareRoot(rax, Heap::kTrueValueRootIndex);
   2491     __ j(equal, if_true);
   2492     __ CompareRoot(rax, Heap::kFalseValueRootIndex);
   2493     Split(equal, if_true, if_false, fall_through);
   2494   } else if (String::Equals(check, factory->undefined_string())) {
   2495     __ CompareRoot(rax, Heap::kNullValueRootIndex);
   2496     __ j(equal, if_false);
   2497     __ JumpIfSmi(rax, if_false);
   2498     // Check for undetectable objects => true.
   2499     __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
   2500     __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
   2501              Immediate(1 << Map::kIsUndetectable));
   2502     Split(not_zero, if_true, if_false, fall_through);
   2503   } else if (String::Equals(check, factory->function_string())) {
   2504     __ JumpIfSmi(rax, if_false);
   2505     // Check for callable and not undetectable objects => true.
   2506     __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
   2507     __ movzxbl(rdx, FieldOperand(rdx, Map::kBitFieldOffset));
   2508     __ andb(rdx,
   2509             Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
   2510     __ cmpb(rdx, Immediate(1 << Map::kIsCallable));
   2511     Split(equal, if_true, if_false, fall_through);
   2512   } else if (String::Equals(check, factory->object_string())) {
   2513     __ JumpIfSmi(rax, if_false);
   2514     __ CompareRoot(rax, Heap::kNullValueRootIndex);
   2515     __ j(equal, if_true);
   2516     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
   2517     __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rdx);
   2518     __ j(below, if_false);
   2519     // Check for callable or undetectable objects => false.
   2520     __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
   2521              Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
   2522     Split(zero, if_true, if_false, fall_through);
   2523   } else {
   2524     if (if_false != fall_through) __ jmp(if_false);
   2525   }
   2526   context()->Plug(if_true, if_false);
   2527 }
   2528 
   2529 
   2530 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
   2531   Comment cmnt(masm_, "[ CompareOperation");
   2532 
   2533   // First we try a fast inlined version of the compare when one of
   2534   // the operands is a literal.
   2535   if (TryLiteralCompare(expr)) return;
   2536 
   2537   // Always perform the comparison for its control flow.  Pack the result
   2538   // into the expression's context after the comparison is performed.
   2539   Label materialize_true, materialize_false;
   2540   Label* if_true = NULL;
   2541   Label* if_false = NULL;
   2542   Label* fall_through = NULL;
   2543   context()->PrepareTest(&materialize_true, &materialize_false,
   2544                          &if_true, &if_false, &fall_through);
   2545 
   2546   Token::Value op = expr->op();
   2547   VisitForStackValue(expr->left());
   2548   switch (op) {
   2549     case Token::IN:
   2550       VisitForStackValue(expr->right());
   2551       SetExpressionPosition(expr);
   2552       EmitHasProperty();
   2553       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
   2554       __ CompareRoot(rax, Heap::kTrueValueRootIndex);
   2555       Split(equal, if_true, if_false, fall_through);
   2556       break;
   2557 
   2558     case Token::INSTANCEOF: {
   2559       VisitForAccumulatorValue(expr->right());
   2560       SetExpressionPosition(expr);
   2561       PopOperand(rdx);
   2562       __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
   2563       RestoreContext();
   2564       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
   2565       __ CompareRoot(rax, Heap::kTrueValueRootIndex);
   2566       Split(equal, if_true, if_false, fall_through);
   2567       break;
   2568     }
   2569 
   2570     default: {
   2571       VisitForAccumulatorValue(expr->right());
   2572       SetExpressionPosition(expr);
   2573       Condition cc = CompareIC::ComputeCondition(op);
   2574       PopOperand(rdx);
   2575 
   2576       bool inline_smi_code = ShouldInlineSmiCase(op);
   2577       JumpPatchSite patch_site(masm_);
   2578       if (inline_smi_code) {
   2579         Label slow_case;
   2580         __ movp(rcx, rdx);
   2581         __ orp(rcx, rax);
   2582         patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
   2583         __ cmpp(rdx, rax);
   2584         Split(cc, if_true, if_false, NULL);
   2585         __ bind(&slow_case);
   2586       }
   2587 
   2588       Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
   2589       CallIC(ic, expr->CompareOperationFeedbackId());
   2590       patch_site.EmitPatchInfo();
   2591 
   2592       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2593       __ testp(rax, rax);
   2594       Split(cc, if_true, if_false, fall_through);
   2595     }
   2596   }
   2597 
   2598   // Convert the result of the comparison into one expected for this
   2599   // expression's context.
   2600   context()->Plug(if_true, if_false);
   2601 }
   2602 
   2603 
   2604 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
   2605                                               Expression* sub_expr,
   2606                                               NilValue nil) {
   2607   Label materialize_true, materialize_false;
   2608   Label* if_true = NULL;
   2609   Label* if_false = NULL;
   2610   Label* fall_through = NULL;
   2611   context()->PrepareTest(&materialize_true, &materialize_false,
   2612                          &if_true, &if_false, &fall_through);
   2613 
   2614   VisitForAccumulatorValue(sub_expr);
   2615   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2616   if (expr->op() == Token::EQ_STRICT) {
   2617     Heap::RootListIndex nil_value = nil == kNullValue ?
   2618         Heap::kNullValueRootIndex :
   2619         Heap::kUndefinedValueRootIndex;
   2620     __ CompareRoot(rax, nil_value);
   2621     Split(equal, if_true, if_false, fall_through);
   2622   } else {
   2623     __ JumpIfSmi(rax, if_false);
   2624     __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
   2625     __ testb(FieldOperand(rax, Map::kBitFieldOffset),
   2626              Immediate(1 << Map::kIsUndetectable));
   2627     Split(not_zero, if_true, if_false, fall_through);
   2628   }
   2629   context()->Plug(if_true, if_false);
   2630 }
   2631 
   2632 
   2633 Register FullCodeGenerator::result_register() {
   2634   return rax;
   2635 }
   2636 
   2637 
   2638 Register FullCodeGenerator::context_register() {
   2639   return rsi;
   2640 }
   2641 
   2642 void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
   2643   DCHECK(IsAligned(frame_offset, kPointerSize));
   2644   __ movp(value, Operand(rbp, frame_offset));
   2645 }
   2646 
   2647 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
   2648   DCHECK(IsAligned(frame_offset, kPointerSize));
   2649   __ movp(Operand(rbp, frame_offset), value);
   2650 }
   2651 
   2652 
   2653 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
   2654   __ movp(dst, ContextOperand(rsi, context_index));
   2655 }
   2656 
   2657 
   2658 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
   2659   DeclarationScope* closure_scope = scope()->GetClosureScope();
   2660   if (closure_scope->is_script_scope() ||
   2661       closure_scope->is_module_scope()) {
   2662     // Contexts nested in the native context have a canonical empty function
   2663     // as their closure, not the anonymous closure containing the global
   2664     // code.
   2665     __ movp(rax, NativeContextOperand());
   2666     PushOperand(ContextOperand(rax, Context::CLOSURE_INDEX));
   2667   } else if (closure_scope->is_eval_scope()) {
   2668     // Contexts created by a call to eval have the same closure as the
   2669     // context calling eval, not the anonymous closure containing the eval
   2670     // code.  Fetch it from the context.
   2671     PushOperand(ContextOperand(rsi, Context::CLOSURE_INDEX));
   2672   } else {
   2673     DCHECK(closure_scope->is_function_scope());
   2674     PushOperand(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   2675   }
   2676 }
   2677 
   2678 
   2679 #undef __
   2680 
   2681 
   2682 static const byte kJnsInstruction = 0x79;
   2683 static const byte kNopByteOne = 0x66;
   2684 static const byte kNopByteTwo = 0x90;
   2685 #ifdef DEBUG
   2686 static const byte kCallInstruction = 0xe8;
   2687 #endif
   2688 
   2689 
   2690 void BackEdgeTable::PatchAt(Code* unoptimized_code,
   2691                             Address pc,
   2692                             BackEdgeState target_state,
   2693                             Code* replacement_code) {
   2694   Address call_target_address = pc - kIntSize;
   2695   Address jns_instr_address = call_target_address - 3;
   2696   Address jns_offset_address = call_target_address - 2;
   2697 
   2698   switch (target_state) {
   2699     case INTERRUPT:
   2700       //     sub <profiling_counter>, <delta>  ;; Not changed
   2701       //     jns ok
   2702       //     call <interrupt stub>
   2703       //   ok:
   2704       *jns_instr_address = kJnsInstruction;
   2705       *jns_offset_address = kJnsOffset;
   2706       break;
   2707     case ON_STACK_REPLACEMENT:
   2708       //     sub <profiling_counter>, <delta>  ;; Not changed
   2709       //     nop
   2710       //     nop
   2711       //     call <on-stack replacment>
   2712       //   ok:
   2713       *jns_instr_address = kNopByteOne;
   2714       *jns_offset_address = kNopByteTwo;
   2715       break;
   2716   }
   2717 
   2718   Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
   2719                                    call_target_address, unoptimized_code,
   2720                                    replacement_code->entry());
   2721   unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
   2722       unoptimized_code, call_target_address, replacement_code);
   2723 }
   2724 
   2725 
   2726 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
   2727     Isolate* isolate,
   2728     Code* unoptimized_code,
   2729     Address pc) {
   2730   Address call_target_address = pc - kIntSize;
   2731   Address jns_instr_address = call_target_address - 3;
   2732   DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
   2733 
   2734   if (*jns_instr_address == kJnsInstruction) {
   2735     DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
   2736     DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
   2737               Assembler::target_address_at(call_target_address,
   2738                                            unoptimized_code));
   2739     return INTERRUPT;
   2740   }
   2741 
   2742   DCHECK_EQ(kNopByteOne, *jns_instr_address);
   2743   DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
   2744 
   2745   DCHECK_EQ(
   2746       isolate->builtins()->OnStackReplacement()->entry(),
   2747       Assembler::target_address_at(call_target_address, unoptimized_code));
   2748   return ON_STACK_REPLACEMENT;
   2749 }
   2750 
   2751 }  // namespace internal
   2752 }  // namespace v8
   2753 
   2754 #endif  // V8_TARGET_ARCH_X64
   2755