// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_X64

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, near_jump);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, near_jump);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      ASSERT(is_int8(delta_to_patch_site));
      __ testl(rax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    ASSERT(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, near_jump);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
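
// How a patch site works, in outline: testb always clears the carry flag,
// so before patching the jnc emitted by EmitJumpIfNotSmi is always taken
// and the jc emitted by EmitJumpIfSmi never is, as the comments above note.
// When the inline cache later patches the site (jc -> jz, jnc -> jnz), the
// zero flag produced by "testb reg, kSmiTagMask" performs a real smi check.
// EmitPatchInfo records the byte distance back to the patch site (asserted
// to fit in 8 bits) as the immediate of an otherwise unused testl on rax,
// so the patcher can find the jump to rewrite; a bare nop signals that
// there is no inlined smi code to patch.  See VisitSwitchStatement below
// for a typical use.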


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called (i.e. ourselves)
//   o rsi: our context
//   o rbp: our caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
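//
// A sketch of the frame the prologue below builds, with n formal parameters
// (illustrative; JavaScriptFrameConstants in frames-x64.h is authoritative,
// kPointerSize == 8):
//
//   rbp + 16 + n * 8           : receiver
//   rbp + 16 + (n - 1 - i) * 8 : parameter i (parameter 0 is highest)
//   rbp + 8                    : return address
//   rbp + 0                    : caller's rbp
//   rbp - 8                    : context (also kept live in rsi)
//   rbp - 16                   : the JS function (rdi on entry)
//   rbp - 24, ...              : stack-allocated locals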
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). rcx is zero for method calls and non-zero for
  // function calls.
  if (!info->is_classic_mode() || info->is_native()) {
    Label ok;
    __ testq(rcx, rcx);
    __ j(zero, &ok, Label::kNear);
    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ movq(Operand(rsp, receiver_offset), kScratchRegister);
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ push(rbp);  // Caller's frame pointer.
  __ movq(rbp, rsp);
  __ push(rsi);  // Callee's context.
  __ push(rdi);  // Callee's JS Function.
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count == 1) {
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    } else if (locals_count > 1) {
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < locals_count; i++) {
        __ push(rdx);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in rdi.
    __ push(rdi);
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in both rax and rsi.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in rsi.
    __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);

    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movq(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ movq(Operand(rsi, context_offset), rax);
        // Update the write barrier.  This clobbers rax and rbx.
        __ RecordWriteContextSlot(
            rsi, context_offset, rax, rbx, kDontSaveFPRegs);
      }
    }
  }

  // Possibly allocate an arguments object.
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(rdi);
    } else {
      __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // The receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ lea(rdx,
           Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(rdx);
    __ Push(Smi::FromInt(num_parameters));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, rax, rbx, rdx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_HARMONY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
      __ j(above_equal, &ok, Label::kNear);
      StackCheckStub stub;
      __ CallStub(&stub);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Set(rax, 0);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
                    Smi::FromInt(-delta));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
    // Self-optimization is a one-off thing; if it fails, don't try again.
    reset_value = Smi::kMaxValue;
  }
  __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ movq(kScratchRegister,
          reinterpret_cast<uint64_t>(Smi::FromInt(reset_value)),
          RelocInfo::NONE64);
  __ movq(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  int weight = 1;
  if (FLAG_weighted_back_edges) {
    ASSERT(back_edge_target->is_bound());
    int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
    weight = Min(kMaxBackEdgeWeight,
                 Max(1, distance / kCodeSizeMultiplier));
  }
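
  // The weight makes the counter decrement proportional to the code size of
  // the loop body, so each back edge roughly charges the counter for the
  // code it just executed.  Illustrative arithmetic only: if
  // kCodeSizeMultiplier were 100, a back edge 450 bytes from its target
  // would give weight Max(1, 450 / 100) == 4, subject to the
  // kMaxBackEdgeWeight cap.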
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  InterruptStub stub;
  __ CallStub(&stub);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(rax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      // Pretend that the exit is a backwards jump to the entry.
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kCodeSizeMultiplier));
      }
      EmitProfilingCounterDecrement(weight);
      Label ok;
      __ j(positive, &ok, Label::kNear);
      __ push(rax);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      } else {
        InterruptStub stub;
        __ CallStub(&stub);
      }
      __ pop(rax);
      EmitProfilingCounterReset();
      __ bind(&ok);
    }
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ movq(rsp, rbp);
    __ pop(rbp);
    int no_frame_start = masm_->pc_offset();

    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, rcx);

#ifdef ENABLE_DEBUGGER_SUPPORT
    // Add padding that will be overwritten by a debugger breakpoint.  We
    // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
    // (3 + 1 + 3).
    const int kPadding = Assembler::kJSReturnSequenceLength - 7;
    for (int i = 0; i < kPadding; ++i) {
      masm_->int3();
    }
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  __ push(operand);
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ PushRoot(index);
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Smi::cast(*lit));
  } else {
    __ Move(result_register(), lit);
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Smi::cast(*lit));
  } else {
    __ Push(lit);
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Move(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ movq(Operand(rsp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Move(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Move(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Push(isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Push(isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ PushRoot(value_root_index);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, RelocInfo::CODE_TARGET, condition->test_id());
  __ testq(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(rbp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ movq(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ movq(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
    __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
    __ Check(not_equal, kDeclarationInWithContext);
    __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movq(StackOperand(variable), kScratchRegister);
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movq(ContextOperand(rsi, variable->index()), kScratchRegister);
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(rsi);
      __ Push(variable->name());
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ Push(Smi::FromInt(attr));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ PushRoot(Heap::kTheHoleValueRootIndex);
      } else {
        __ Push(Smi::FromInt(0));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ movq(StackOperand(variable), result_register());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ movq(ContextOperand(rsi, variable->index()), result_register());
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(rsi,
                                offset,
                                result_register(),
                                rcx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ push(rsi);
      __ Push(variable->name());
      __ Push(Smi::FromInt(NONE));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(rax, scope_->ContextChainLength(scope_->GlobalScope()));
  __ movq(rax, ContextOperand(rax, variable->interface()->Index()));
  __ movq(rax, ContextOperand(rax, Context::EXTENSION_INDEX));

  // Assign it.
  __ movq(ContextOperand(rsi, variable->index()), rax);
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(rsi,
                            Context::SlotOffset(variable->index()),
                            rax,
                            rcx,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(rsi);  // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as the final fall-through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ movq(rdx, Operand(rsp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ movq(rcx, rdx);
      __ or_(rcx, rax);
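      // kSmiTag is zero, so the OR of the two values has its low bit set iff
      // at least one of them is not a smi; a single smi test on rcx below
      // therefore checks the switch value and the label at once.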
      patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);

      __ cmpq(rdx, rax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();

    __ testq(rax, rax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, &exit);
  Register null_value = rdi;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmpq(rax, null_value);
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert);
  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
  __ j(above_equal, &done_convert);
  __ bind(&convert);
  __ push(rax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(rax);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(rax);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kMetaMapRootIndex);
  __ j(not_equal, &fixed_array);

  // We got a map in register rax. Get the enumeration cache from it.
  __ bind(&use_cache);

  Label no_descriptors;

  __ EnumLength(rdx, rax);
  __ Cmp(rdx, Smi::FromInt(0));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(rax, rcx);
  __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
  __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(rax);  // Map.
  __ push(rcx);  // Enumeration cache.
  __ push(rdx);  // Number of valid entries for the map in the enum cache.
  __ Push(Smi::FromInt(0));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ addq(rsp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register rax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<Cell> cell = isolate()->factory()->NewCell(
      Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
                     isolate()));
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
  __ LoadHeapObject(rbx, cell);
  __ Move(FieldOperand(rbx, Cell::kValueOffset),
          Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker));

  __ Move(rbx, Smi::FromInt(1));  // Smi indicates slow check
  __ movq(rcx, Operand(rsp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
  __ j(above, &non_proxy);
  __ Move(rbx, Smi::FromInt(0));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ push(rbx);  // Smi
  __ push(rax);  // Array
  __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
  __ push(rax);  // Fixed array length (as smi).
  __ Push(Smi::FromInt(0));  // Initial index.
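
  // Both setup paths above leave the same five for-in slots on the stack
  // (kPointerSize == 8 on x64):
  //   rsp[0]  : current index (smi)
  //   rsp[8]  : length of the cache or fixed array (smi)
  //   rsp[16] : enum cache array, or fixed array of keys
  //   rsp[24] : map of the enumerable, or a smi marker (0 = proxy, 1 = slow)
  //   rsp[32] : the enumerable object itself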

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  __ movq(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
  __ cmpq(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register rbx.
  __ movq(rbx, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ movq(rbx, FieldOperand(rbx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));

  // Get the expected map (or, in the permanent slow case, a smi marker)
  // from the stack into register rdx.
   1168   __ movq(rdx, Operand(rsp, 3 * kPointerSize));
   1169 
   1170   // Check if the expected map still matches that of the enumerable.
   1171   // If not, we may have to filter the key.
   1172   Label update_each;
   1173   __ movq(rcx, Operand(rsp, 4 * kPointerSize));
   1174   __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
   1175   __ j(equal, &update_each, Label::kNear);
   1176 
   1177   // For proxies, no filtering is done.
   1178   // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
   1179   __ Cmp(rdx, Smi::FromInt(0));
   1180   __ j(equal, &update_each, Label::kNear);
   1181 
   1182   // Convert the entry to a string or null if it isn't a property
   1183   // anymore. If the property has been removed while iterating, we
   1184   // just skip it.
   1185   __ push(rcx);  // Enumerable.
   1186   __ push(rbx);  // Current entry.
   1187   __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
   1188   __ Cmp(rax, Smi::FromInt(0));
   1189   __ j(equal, loop_statement.continue_label());
   1190   __ movq(rbx, rax);
   1191 
   1192   // Update the 'each' property or variable from the possibly filtered
   1193   // entry in register rbx.
   1194   __ bind(&update_each);
   1195   __ movq(result_register(), rbx);
   1196   // Perform the assignment as if via '='.
   1197   { EffectContext context(this);
   1198     EmitAssignment(stmt->each());
   1199   }
   1200 
   1201   // Generate code for the body of the loop.
   1202   Visit(stmt->body());
   1203 
   1204   // Generate code for going to the next element by incrementing the
   1205   // index (smi) stored on top of the stack.
   1206   __ bind(loop_statement.continue_label());
   1207   __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
   1208 
   1209   EmitBackEdgeBookkeeping(stmt, &loop);
   1210   __ jmp(&loop);
   1211 
   1212   // Remove the pointers stored on the stack.
   1213   __ bind(loop_statement.break_label());
   1214   __ addq(rsp, Immediate(5 * kPointerSize));
   1215 
   1216   // Exit and decrement the loop depth.
   1217   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
   1218   __ bind(&exit);
   1219   decrement_loop_depth();
   1220 }
   1221 
   1222 
   1223 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
   1224   Comment cmnt(masm_, "[ ForOfStatement");
   1225   SetStatementPosition(stmt);
   1226 
   1227   Iteration loop_statement(this, stmt);
   1228   increment_loop_depth();
   1229 
   1230   // var iterator = iterable[@@iterator]()
   1231   VisitForAccumulatorValue(stmt->assign_iterator());
   1232 
   1233   // As with for-in, skip the loop if the iterator is null or undefined.
   1234   __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
   1235   __ j(equal, loop_statement.break_label());
   1236   __ CompareRoot(rax, Heap::kNullValueRootIndex);
   1237   __ j(equal, loop_statement.break_label());
   1238 
   1239   // Convert the iterator to a JS object.
   1240   Label convert, done_convert;
   1241   __ JumpIfSmi(rax, &convert);
   1242   __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
   1243   __ j(above_equal, &done_convert);
   1244   __ bind(&convert);
   1245   __ push(rax);
   1246   __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
   1247   __ bind(&done_convert);
   1248 
   1249   // Loop entry.
   1250   __ bind(loop_statement.continue_label());
   1251 
   1252   // result = iterator.next()
   1253   VisitForEffect(stmt->next_result());
   1254 
   1255   // if (result.done) break;
   1256   Label result_not_done;
   1257   VisitForControl(stmt->result_done(),
   1258                   loop_statement.break_label(),
   1259                   &result_not_done,
   1260                   &result_not_done);
   1261   __ bind(&result_not_done);
   1262 
   1263   // each = result.value
   1264   VisitForEffect(stmt->assign_each());
   1265 
   1266   // Generate code for the body of the loop.
   1267   Visit(stmt->body());
   1268 
   1269   // Check stack before looping.
   1270   PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
   1271   EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
   1272   __ jmp(loop_statement.continue_label());
   1273 
   1274   // Exit and decrement the loop depth.
   1275   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
   1276   __ bind(loop_statement.break_label());
   1277   decrement_loop_depth();
   1278 }
   1279 
   1280 
   1281 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
   1282                                        bool pretenure) {
   1283   // Use the fast case closure allocation code that allocates in new
   1284   // space for nested functions that don't need literals cloning. If
   1285   // we're running with the --always-opt or the --prepare-always-opt
   1286   // flag, we need to use the runtime function so that the new function
   1287   // we are creating here gets a chance to have its code optimized and
   1288   // doesn't just get a copy of the existing unoptimized code.
   1289   if (!FLAG_always_opt &&
   1290       !FLAG_prepare_always_opt &&
   1291       !pretenure &&
   1292       scope()->is_function_scope() &&
   1293       info->num_literals() == 0) {
   1294     FastNewClosureStub stub(info->language_mode(), info->is_generator());
   1295     __ Push(info);
   1296     __ CallStub(&stub);
   1297   } else {
   1298     __ push(rsi);
   1299     __ Push(info);
   1300     __ Push(pretenure
   1301             ? isolate()->factory()->true_value()
   1302             : isolate()->factory()->false_value());
   1303     __ CallRuntime(Runtime::kNewClosure, 3);
   1304   }
   1305   context()->Plug(rax);
   1306 }
   1307 
   1308 
   1309 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
   1310   Comment cmnt(masm_, "[ VariableProxy");
   1311   EmitVariableLoad(expr);
   1312 }
   1313 
   1314 
   1315 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
   1316                                                       TypeofState typeof_state,
   1317                                                       Label* slow) {
   1318   Register context = rsi;
   1319   Register temp = rdx;
   1320 
   1321   Scope* s = scope();
   1322   while (s != NULL) {
   1323     if (s->num_heap_slots() > 0) {
   1324       if (s->calls_non_strict_eval()) {
   1325         // Check that extension is NULL.
   1326         __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
   1327                 Immediate(0));
   1328         __ j(not_equal, slow);
   1329       }
   1330       // Load next context in chain.
   1331       __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
   1332       // Walk the rest of the chain without clobbering rsi.
   1333       context = temp;
   1334     }
   1335     // If no outer scope calls eval, we do not need to check more
   1336     // context extensions.  If we have reached an eval scope, we check
   1337     // all extensions from this point.
   1338     if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
   1339     s = s->outer_scope();
   1340   }
   1341 
   1342   if (s != NULL && s->is_eval_scope()) {
   1343     // Loop up the context chain.  There is no frame effect so it is
   1344     // safe to use raw labels here.
   1345     Label next, fast;
   1346     if (!context.is(temp)) {
   1347       __ movq(temp, context);
   1348     }
   1349     // Load map for comparison into register, outside loop.
   1350     __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
   1351     __ bind(&next);
   1352     // Terminate at native context.
   1353     __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
   1354     __ j(equal, &fast, Label::kNear);
   1355     // Check that extension is NULL.
   1356     __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
   1357     __ j(not_equal, slow);
   1358     // Load next context in chain.
   1359     __ movq(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
   1360     __ jmp(&next);
   1361     __ bind(&fast);
   1362   }
   1363 
   1364   // All extension objects were empty and it is safe to use a global
   1365   // load IC call.
   1366   __ movq(rax, GlobalObjectOperand());
   1367   __ Move(rcx, var->name());
   1368   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   1369   RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
   1370       ? RelocInfo::CODE_TARGET
   1371       : RelocInfo::CODE_TARGET_CONTEXT;
   1372   CallIC(ic, mode);
   1373 }
   1374 
   1375 
   1376 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
   1377                                                                 Label* slow) {
   1378   ASSERT(var->IsContextSlot());
   1379   Register context = rsi;
   1380   Register temp = rbx;
   1381 
   1382   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
   1383     if (s->num_heap_slots() > 0) {
   1384       if (s->calls_non_strict_eval()) {
   1385         // Check that extension is NULL.
   1386         __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
   1387                 Immediate(0));
   1388         __ j(not_equal, slow);
   1389       }
   1390       __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
   1391       // Walk the rest of the chain without clobbering rsi.
   1392       context = temp;
   1393     }
   1394   }
   1395   // Check that last extension is NULL.
   1396   __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
   1397   __ j(not_equal, slow);
   1398 
   1399   // This function is used only for loads, not stores, so it's safe to
   1400   // return an rsi-based operand (the write barrier cannot be allowed to
   1401   // destroy the rsi register).
   1402   return ContextOperand(context, var->index());
   1403 }
   1404 
   1405 
   1406 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
   1407                                                   TypeofState typeof_state,
   1408                                                   Label* slow,
   1409                                                   Label* done) {
   1410   // Generate fast-case code for variables that might be shadowed by
   1411   // eval-introduced variables.  Eval is used a lot without
   1412   // introducing variables.  In those cases, we do not want to
   1413   // perform a runtime call for all variables in the scope
   1414   // containing the eval.
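          // Illustrative example: given
          //   var x = 1;
          //   function f() { eval(src); return x; }
          // 'x' inside f is DYNAMIC_GLOBAL: unless the eval introduced a
          // shadowing binding, it can be loaded with a fast global IC once the
          // context chain has been checked for extensions.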
   1415   if (var->mode() == DYNAMIC_GLOBAL) {
   1416     EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
   1417     __ jmp(done);
   1418   } else if (var->mode() == DYNAMIC_LOCAL) {
   1419     Variable* local = var->local_if_not_shadowed();
   1420     __ movq(rax, ContextSlotOperandCheckExtensions(local, slow));
   1421     if (local->mode() == LET ||
   1422         local->mode() == CONST ||
   1423         local->mode() == CONST_HARMONY) {
   1424       __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
   1425       __ j(not_equal, done);
   1426       if (local->mode() == CONST) {
   1427         __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
   1428       } else {  // LET || CONST_HARMONY
   1429         __ Push(var->name());
   1430         __ CallRuntime(Runtime::kThrowReferenceError, 1);
   1431       }
   1432     }
   1433     __ jmp(done);
   1434   }
   1435 }
   1436 
   1437 
   1438 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
   1439   // Record position before possible IC call.
   1440   SetSourcePosition(proxy->position());
   1441   Variable* var = proxy->var();
   1442 
   1443   // Three cases: global variables, lookup variables, and all other types of
   1444   // variables.
   1445   switch (var->location()) {
   1446     case Variable::UNALLOCATED: {
   1447       Comment cmnt(masm_, "Global variable");
   1448       // Use inline caching. Variable name is passed in rcx and the global
   1449       // object on the stack.
   1450       __ Move(rcx, var->name());
   1451       __ movq(rax, GlobalObjectOperand());
   1452       Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   1453       CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
   1454       context()->Plug(rax);
   1455       break;
   1456     }
   1457 
   1458     case Variable::PARAMETER:
   1459     case Variable::LOCAL:
   1460     case Variable::CONTEXT: {
   1461       Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot");
   1462       if (var->binding_needs_init()) {
   1463         // var->scope() may be NULL when the proxy is located in eval code and
   1464         // refers to a potential outside binding. Currently those bindings are
   1465         // always looked up dynamically, i.e. in that case
   1466         //     var->location() == LOOKUP
   1467         // always holds.
   1468         ASSERT(var->scope() != NULL);
   1469 
   1470         // Check if the binding really needs an initialization check. The check
   1471         // can be skipped in the following situation: we have a LET or CONST
   1472         // binding in harmony mode, both the Variable and the VariableProxy have
   1473         // the same declaration scope (i.e. they are both in global code, in the
   1474         // same function or in the same eval code) and the VariableProxy is in
   1475         // the source physically located after the initializer of the variable.
   1476         //
   1477         // We cannot skip any initialization checks for CONST in non-harmony
   1478         // mode because const variables may be declared but never initialized:
   1479         //   if (false) { const x; }; var y = x;
   1480         //
   1481         // The condition on the declaration scopes is a conservative check for
   1482         // nested functions that access a binding and are called before the
   1483         // binding is initialized:
   1484         //   function() { f(); let x = 1; function f() { x = 2; } }
   1485         //
   1486         bool skip_init_check;
   1487         if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
   1488           skip_init_check = false;
   1489         } else {
   1490           // Check that we always have valid source position.
   1491           ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
   1492           ASSERT(proxy->position() != RelocInfo::kNoPosition);
   1493           skip_init_check = var->mode() != CONST &&
   1494               var->initializer_position() < proxy->position();
   1495         }
   1496 
   1497         if (!skip_init_check) {
   1498           // Let and const need a read barrier.
   1499           Label done;
   1500           GetVar(rax, var);
   1501           __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
   1502           __ j(not_equal, &done, Label::kNear);
   1503           if (var->mode() == LET || var->mode() == CONST_HARMONY) {
   1504             // Throw a reference error when using an uninitialized let/const
   1505             // binding in harmony mode.
   1506             __ Push(var->name());
   1507             __ CallRuntime(Runtime::kThrowReferenceError, 1);
   1508           } else {
   1509             // Uninitialized const bindings outside of harmony mode are unholed.
   1510             ASSERT(var->mode() == CONST);
   1511             __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
   1512           }
   1513           __ bind(&done);
   1514           context()->Plug(rax);
   1515           break;
   1516         }
   1517       }
   1518       context()->Plug(var);
   1519       break;
   1520     }
   1521 
   1522     case Variable::LOOKUP: {
   1523       Label done, slow;
   1524       // Generate code for loading from variables potentially shadowed
   1525       // by eval-introduced variables.
   1526       EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
   1527       __ bind(&slow);
   1528       Comment cmnt(masm_, "Lookup slot");
   1529       __ push(rsi);  // Context.
   1530       __ Push(var->name());
   1531       __ CallRuntime(Runtime::kLoadContextSlot, 2);
   1532       __ bind(&done);
   1533       context()->Plug(rax);
   1534       break;
   1535     }
   1536   }
   1537 }
   1538 
   1539 
   1540 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
   1541   Comment cmnt(masm_, "[ RegExpLiteral");
   1542   Label materialized;
   1543   // Registers will be used as follows:
   1544   // rdi = JS function.
   1545   // rcx = literals array.
   1546   // rbx = regexp literal.
   1547   // rax = regexp literal clone.
   1548   __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1549   __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
   1550   int literal_offset =
   1551       FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
   1552   __ movq(rbx, FieldOperand(rcx, literal_offset));
   1553   __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
   1554   __ j(not_equal, &materialized, Label::kNear);
   1555 
   1556   // Create the regexp literal using the runtime function.
   1557   // The result will be in rax.
   1558   __ push(rcx);
   1559   __ Push(Smi::FromInt(expr->literal_index()));
   1560   __ Push(expr->pattern());
   1561   __ Push(expr->flags());
   1562   __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
   1563   __ movq(rbx, rax);
   1564 
   1565   __ bind(&materialized);
   1566   int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
   1567   Label allocated, runtime_allocate;
   1568   __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
   1569   __ jmp(&allocated);
   1570 
   1571   __ bind(&runtime_allocate);
   1572   __ push(rbx);
   1573   __ Push(Smi::FromInt(size));
   1574   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
   1575   __ pop(rbx);
   1576 
   1577   __ bind(&allocated);
   1578   // Copy the content into the newly allocated memory.
   1579   // (Unroll copy loop once for better throughput).
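          // When size is an odd multiple of kPointerSize, the two-word loop
          // leaves one trailing field; the fix-up copy below handles it.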
   1580   for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
   1581     __ movq(rdx, FieldOperand(rbx, i));
   1582     __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
   1583     __ movq(FieldOperand(rax, i), rdx);
   1584     __ movq(FieldOperand(rax, i + kPointerSize), rcx);
   1585   }
   1586   if ((size % (2 * kPointerSize)) != 0) {
   1587     __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
   1588     __ movq(FieldOperand(rax, size - kPointerSize), rdx);
   1589   }
   1590   context()->Plug(rax);
   1591 }
   1592 
   1593 
   1594 void FullCodeGenerator::EmitAccessor(Expression* expression) {
   1595   if (expression == NULL) {
   1596     __ PushRoot(Heap::kNullValueRootIndex);
   1597   } else {
   1598     VisitForStackValue(expression);
   1599   }
   1600 }
   1601 
   1602 
   1603 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   1604   Comment cmnt(masm_, "[ ObjectLiteral");
   1605   Handle<FixedArray> constant_properties = expr->constant_properties();
   1606   int flags = expr->fast_elements()
   1607       ? ObjectLiteral::kFastElements
   1608       : ObjectLiteral::kNoFlags;
   1609   flags |= expr->has_function()
   1610       ? ObjectLiteral::kHasFunction
   1611       : ObjectLiteral::kNoFlags;
   1612   int properties_count = constant_properties->length() / 2;
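          // Pick a materialization strategy: deep literals (or ones that may
          // store double fields) need the full runtime call, literals the stub
          // cannot handle (too many properties, non-fast elements, or when
          // serializing) use the shallow runtime path, and everything else can
          // use the fast clone stub.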
   1613   if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
   1614       expr->depth() > 1) {
   1615     __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1616     __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
   1617     __ Push(Smi::FromInt(expr->literal_index()));
   1618     __ Push(constant_properties);
   1619     __ Push(Smi::FromInt(flags));
   1620     __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
   1621   } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
   1622       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
   1623     __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1624     __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
   1625     __ Push(Smi::FromInt(expr->literal_index()));
   1626     __ Push(constant_properties);
   1627     __ Push(Smi::FromInt(flags));
   1628     __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
   1629   } else {
   1630     __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1631     __ movq(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
   1632     __ Move(rbx, Smi::FromInt(expr->literal_index()));
   1633     __ Move(rcx, constant_properties);
   1634     __ Move(rdx, Smi::FromInt(flags));
   1635     FastCloneShallowObjectStub stub(properties_count);
   1636     __ CallStub(&stub);
   1637   }
   1638 
   1639   // If result_saved is true the result is on top of the stack.  If
   1640   // result_saved is false the result is in rax.
   1641   bool result_saved = false;
   1642 
   1643   // Mark all computed expressions that are bound to a key that
   1644   // is shadowed by a later occurrence of the same key. For the
   1645   // marked expressions, no store code is emitted.
   1646   expr->CalculateEmitStore(zone());
   1647 
   1648   AccessorTable accessor_table(zone());
   1649   for (int i = 0; i < expr->properties()->length(); i++) {
   1650     ObjectLiteral::Property* property = expr->properties()->at(i);
   1651     if (property->IsCompileTimeValue()) continue;
   1652 
   1653     Literal* key = property->key();
   1654     Expression* value = property->value();
   1655     if (!result_saved) {
   1656       __ push(rax);  // Save result on the stack
   1657       result_saved = true;
   1658     }
   1659     switch (property->kind()) {
   1660       case ObjectLiteral::Property::CONSTANT:
   1661         UNREACHABLE();
   1662       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
   1663         ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
   1664         // Fall through.
   1665       case ObjectLiteral::Property::COMPUTED:
   1666         if (key->value()->IsInternalizedString()) {
   1667           if (property->emit_store()) {
   1668             VisitForAccumulatorValue(value);
   1669             __ Move(rcx, key->value());
   1670             __ movq(rdx, Operand(rsp, 0));
   1671             Handle<Code> ic = is_classic_mode()
   1672                 ? isolate()->builtins()->StoreIC_Initialize()
   1673                 : isolate()->builtins()->StoreIC_Initialize_Strict();
   1674             CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
   1675             PrepareForBailoutForId(key->id(), NO_REGISTERS);
   1676           } else {
   1677             VisitForEffect(value);
   1678           }
   1679           break;
   1680         }
   1681         __ push(Operand(rsp, 0));  // Duplicate receiver.
   1682         VisitForStackValue(key);
   1683         VisitForStackValue(value);
   1684         if (property->emit_store()) {
   1685           __ Push(Smi::FromInt(NONE));    // PropertyAttributes
   1686           __ CallRuntime(Runtime::kSetProperty, 4);
   1687         } else {
   1688           __ Drop(3);
   1689         }
   1690         break;
   1691       case ObjectLiteral::Property::PROTOTYPE:
   1692         __ push(Operand(rsp, 0));  // Duplicate receiver.
   1693         VisitForStackValue(value);
   1694         if (property->emit_store()) {
   1695           __ CallRuntime(Runtime::kSetPrototype, 2);
   1696         } else {
   1697           __ Drop(2);
   1698         }
   1699         break;
   1700       case ObjectLiteral::Property::GETTER:
   1701         accessor_table.lookup(key)->second->getter = value;
   1702         break;
   1703       case ObjectLiteral::Property::SETTER:
   1704         accessor_table.lookup(key)->second->setter = value;
   1705         break;
   1706     }
   1707   }
   1708 
   1709   // Emit code to define accessors, using only a single call to the runtime for
   1710   // each pair of corresponding getters and setters.
   1711   for (AccessorTable::Iterator it = accessor_table.begin();
   1712        it != accessor_table.end();
   1713        ++it) {
   1714     __ push(Operand(rsp, 0));  // Duplicate receiver.
   1715     VisitForStackValue(it->first);
   1716     EmitAccessor(it->second->getter);
   1717     EmitAccessor(it->second->setter);
   1718     __ Push(Smi::FromInt(NONE));
   1719     __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
   1720   }
   1721 
   1722   if (expr->has_function()) {
   1723     ASSERT(result_saved);
   1724     __ push(Operand(rsp, 0));
   1725     __ CallRuntime(Runtime::kToFastProperties, 1);
   1726   }
   1727 
   1728   if (result_saved) {
   1729     context()->PlugTOS();
   1730   } else {
   1731     context()->Plug(rax);
   1732   }
   1733 }
   1734 
   1735 
   1736 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   1737   Comment cmnt(masm_, "[ ArrayLiteral");
   1738 
   1739   ZoneList<Expression*>* subexprs = expr->values();
   1740   int length = subexprs->length();
   1741   Handle<FixedArray> constant_elements = expr->constant_elements();
   1742   ASSERT_EQ(2, constant_elements->length());
   1743   ElementsKind constant_elements_kind =
   1744       static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
   1745   bool has_constant_fast_elements =
   1746       IsFastObjectElementsKind(constant_elements_kind);
   1747   Handle<FixedArrayBase> constant_elements_values(
   1748       FixedArrayBase::cast(constant_elements->get(1)));
   1749 
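          // As with object literals, choose the cheapest materialization:
          // copy-on-write fast elements get a specialized stub, deep or
          // oversized literals fall back to the runtime, and the rest use the
          // generic shallow-clone stub.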
   1750   Heap* heap = isolate()->heap();
   1751   if (has_constant_fast_elements &&
   1752       constant_elements_values->map() == heap->fixed_cow_array_map()) {
   1753     // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
   1754     // change, so it's possible to specialize the stub in advance.
   1755     __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
   1756     __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1757     __ movq(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
   1758     __ Move(rbx, Smi::FromInt(expr->literal_index()));
   1759     __ Move(rcx, constant_elements);
   1760     FastCloneShallowArrayStub stub(
   1761         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
   1762         DONT_TRACK_ALLOCATION_SITE,
   1763         length);
   1764     __ CallStub(&stub);
   1765   } else if (expr->depth() > 1) {
   1766     __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1767     __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
   1768     __ Push(Smi::FromInt(expr->literal_index()));
   1769     __ Push(constant_elements);
   1770     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
   1771   } else if (Serializer::enabled() ||
   1772       length > FastCloneShallowArrayStub::kMaximumClonedLength) {
   1773     __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1774     __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
   1775     __ Push(Smi::FromInt(expr->literal_index()));
   1776     __ Push(constant_elements);
   1777     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   1778   } else {
   1779     ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
   1780            FLAG_smi_only_arrays);
   1781     FastCloneShallowArrayStub::Mode mode =
   1782         FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
   1783     AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
   1784         ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
   1785 
   1786     // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
   1787     // change, so it's possible to specialize the stub in advance.
   1788     if (has_constant_fast_elements) {
   1789       mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
   1790       allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
   1791     }
   1792 
   1793     __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   1794     __ movq(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
   1795     __ Move(rbx, Smi::FromInt(expr->literal_index()));
   1796     __ Move(rcx, constant_elements);
   1797     FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
   1798     __ CallStub(&stub);
   1799   }
   1800 
   1801   bool result_saved = false;  // Is the result saved to the stack?
   1802 
   1803   // Emit code to evaluate all the non-constant subexpressions and to store
   1804   // them into the newly cloned array.
   1805   for (int i = 0; i < length; i++) {
   1806     Expression* subexpr = subexprs->at(i);
   1807     // If the subexpression is a literal or a simple materialized literal it
   1808     // is already set in the cloned array.
   1809     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
   1810 
   1811     if (!result_saved) {
   1812       __ push(rax);  // array literal
   1813       __ Push(Smi::FromInt(expr->literal_index()));
   1814       result_saved = true;
   1815     }
   1816     VisitForAccumulatorValue(subexpr);
   1817 
   1818     if (IsFastObjectElementsKind(constant_elements_kind)) {
   1819       // Fast-case array literals with an ElementsKind of FAST_*_ELEMENTS
   1820       // cannot transition, so there is no need to call the runtime stub.
   1821       int offset = FixedArray::kHeaderSize + (i * kPointerSize);
   1822       __ movq(rbx, Operand(rsp, kPointerSize));  // Copy of array literal.
   1823       __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
   1824       // Store the subexpression value in the array's elements.
   1825       __ movq(FieldOperand(rbx, offset), result_register());
   1826       // Update the write barrier for the array store.
   1827       __ RecordWriteField(rbx, offset, result_register(), rcx,
   1828                           kDontSaveFPRegs,
   1829                           EMIT_REMEMBERED_SET,
   1830                           INLINE_SMI_CHECK);
   1831     } else {
   1832       // Store the subexpression value in the array's elements.
   1833       __ Move(rcx, Smi::FromInt(i));
   1834       StoreArrayLiteralElementStub stub;
   1835       __ CallStub(&stub);
   1836     }
   1837 
   1838     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   1839   }
   1840 
   1841   if (result_saved) {
   1842     __ addq(rsp, Immediate(kPointerSize));  // literal index
   1843     context()->PlugTOS();
   1844   } else {
   1845     context()->Plug(rax);
   1846   }
   1847 }
   1848 
   1849 
   1850 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
   1851   Comment cmnt(masm_, "[ Assignment");
   1852   // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
   1853   // on the left-hand side.
   1854   if (!expr->target()->IsValidLeftHandSide()) {
   1855     VisitForEffect(expr->target());
   1856     return;
   1857   }
   1858 
   1859   // Left-hand side can only be a property, a global or a (parameter or local)
   1860   // slot.
   1861   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
   1862   LhsKind assign_type = VARIABLE;
   1863   Property* property = expr->target()->AsProperty();
   1864   if (property != NULL) {
   1865     assign_type = (property->key()->IsPropertyName())
   1866         ? NAMED_PROPERTY
   1867         : KEYED_PROPERTY;
   1868   }
   1869 
   1870   // Evaluate LHS expression.
   1871   switch (assign_type) {
   1872     case VARIABLE:
   1873       // Nothing to do here.
   1874       break;
   1875     case NAMED_PROPERTY:
   1876       if (expr->is_compound()) {
   1877         // We need the receiver both on the stack and in the accumulator.
   1878         VisitForAccumulatorValue(property->obj());
   1879         __ push(result_register());
   1880       } else {
   1881         VisitForStackValue(property->obj());
   1882       }
   1883       break;
   1884     case KEYED_PROPERTY: {
   1885       if (expr->is_compound()) {
   1886         VisitForStackValue(property->obj());
   1887         VisitForAccumulatorValue(property->key());
   1888         __ movq(rdx, Operand(rsp, 0));
   1889         __ push(rax);
   1890       } else {
   1891         VisitForStackValue(property->obj());
   1892         VisitForStackValue(property->key());
   1893       }
   1894       break;
   1895     }
   1896   }
   1897 
   1898   // For compound assignments we need another deoptimization point after the
   1899   // variable/property load.
   1900   if (expr->is_compound()) {
   1901     { AccumulatorValueContext context(this);
   1902       switch (assign_type) {
   1903         case VARIABLE:
   1904           EmitVariableLoad(expr->target()->AsVariableProxy());
   1905           PrepareForBailout(expr->target(), TOS_REG);
   1906           break;
   1907         case NAMED_PROPERTY:
   1908           EmitNamedPropertyLoad(property);
   1909           PrepareForBailoutForId(property->LoadId(), TOS_REG);
   1910           break;
   1911         case KEYED_PROPERTY:
   1912           EmitKeyedPropertyLoad(property);
   1913           PrepareForBailoutForId(property->LoadId(), TOS_REG);
   1914           break;
   1915       }
   1916     }
   1917 
   1918     Token::Value op = expr->binary_op();
   1919     __ push(rax);  // Left operand goes on the stack.
   1920     VisitForAccumulatorValue(expr->value());
   1921 
   1922     OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
   1923         ? OVERWRITE_RIGHT
   1924         : NO_OVERWRITE;
   1925     SetSourcePosition(expr->position() + 1);
   1926     AccumulatorValueContext context(this);
   1927     if (ShouldInlineSmiCase(op)) {
   1928       EmitInlineSmiBinaryOp(expr->binary_operation(),
   1929                             op,
   1930                             mode,
   1931                             expr->target(),
   1932                             expr->value());
   1933     } else {
   1934       EmitBinaryOp(expr->binary_operation(), op, mode);
   1935     }
   1936     // Deoptimization point in case the binary operation may have side effects.
   1937     PrepareForBailout(expr->binary_operation(), TOS_REG);
   1938   } else {
   1939     VisitForAccumulatorValue(expr->value());
   1940   }
   1941 
   1942   // Record source position before possible IC call.
   1943   SetSourcePosition(expr->position());
   1944 
   1945   // Store the value.
   1946   switch (assign_type) {
   1947     case VARIABLE:
   1948       EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
   1949                              expr->op());
   1950       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   1951       context()->Plug(rax);
   1952       break;
   1953     case NAMED_PROPERTY:
   1954       EmitNamedPropertyAssignment(expr);
   1955       break;
   1956     case KEYED_PROPERTY:
   1957       EmitKeyedPropertyAssignment(expr);
   1958       break;
   1959   }
   1960 }
   1961 
   1962 
   1963 void FullCodeGenerator::VisitYield(Yield* expr) {
   1964   Comment cmnt(masm_, "[ Yield");
   1965   // Evaluate yielded value first; the initial iterator definition depends on
   1966   // this.  It stays on the stack while we update the iterator.
   1967   VisitForStackValue(expr->expression());
   1968 
   1969   switch (expr->yield_kind()) {
   1970     case Yield::SUSPEND:
   1971       // Pop value from top-of-stack slot; box result into result register.
   1972       EmitCreateIteratorResult(false);
   1973       __ push(result_register());
   1974       // Fall through.
   1975     case Yield::INITIAL: {
   1976       Label suspend, continuation, post_runtime, resume;
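              // Jump straight to the suspend code; &continuation is bound in
              // between only so that its code offset can be recorded in the
              // generator object below, where EmitGeneratorResume finds it.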
   1977 
   1978       __ jmp(&suspend);
   1979 
   1980       __ bind(&continuation);
   1981       __ jmp(&resume);
   1982 
   1983       __ bind(&suspend);
   1984       VisitForAccumulatorValue(expr->generator_object());
   1985       ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
   1986       __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
   1987               Smi::FromInt(continuation.pos()));
   1988       __ movq(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
   1989       __ movq(rcx, rsi);
   1990       __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
   1991                           kDontSaveFPRegs);
   1992       __ lea(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
   1993       __ cmpq(rsp, rbx);
   1994       __ j(equal, &post_runtime);
   1995       __ push(rax);  // generator object
   1996       __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
   1997       __ movq(context_register(),
   1998               Operand(rbp, StandardFrameConstants::kContextOffset));
   1999       __ bind(&post_runtime);
   2000 
   2001       __ pop(result_register());
   2002       EmitReturnSequence();
   2003 
   2004       __ bind(&resume);
   2005       context()->Plug(result_register());
   2006       break;
   2007     }
   2008 
   2009     case Yield::FINAL: {
   2010       VisitForAccumulatorValue(expr->generator_object());
   2011       __ Move(FieldOperand(result_register(),
   2012                            JSGeneratorObject::kContinuationOffset),
   2013               Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
   2014       // Pop value from top-of-stack slot, box result into result register.
   2015       EmitCreateIteratorResult(true);
   2016       EmitUnwindBeforeReturn();
   2017       EmitReturnSequence();
   2018       break;
   2019     }
   2020 
   2021     case Yield::DELEGATING: {
   2022       VisitForStackValue(expr->generator_object());
   2023 
   2024       // Initial stack layout is as follows:
   2025       // [sp + 1 * kPointerSize] iter
   2026       // [sp + 0 * kPointerSize] g
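              // Taken together, the blocks below implement roughly this loop
              // (an informal sketch of the 'yield* iter' protocol):
              //   var received = undefined;
              //   while (true) {
              //     result = iter[f](received);  // f is 'next', or 'throw'
              //                                  // after a sent exception
              //     if (result.done) break;      // yield* then has the value
              //                                  // result.value
              //     received = yield result;     // re-yielded without re-boxing
              //   }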
   2027 
   2028       Label l_catch, l_try, l_suspend, l_continuation, l_resume;
   2029       Label l_next, l_call, l_loop;
   2030       // Initial send value is undefined.
   2031       __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
   2032       __ jmp(&l_next);
   2033 
   2034       // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
   2035       __ bind(&l_catch);
   2036       handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
   2037       __ LoadRoot(rcx, Heap::kthrow_stringRootIndex);    // "throw"
   2038       __ push(rcx);
   2039       __ push(Operand(rsp, 2 * kPointerSize));           // iter
   2040       __ push(rax);                                      // exception
   2041       __ jmp(&l_call);
   2042 
   2043       // try { received = %yield result }
   2044       // Shuffle the received result above a try handler and yield it without
   2045       // re-boxing.
   2046       __ bind(&l_try);
   2047       __ pop(rax);                                       // result
   2048       __ PushTryHandler(StackHandler::CATCH, expr->index());
   2049       const int handler_size = StackHandlerConstants::kSize;
   2050       __ push(rax);                                      // result
   2051       __ jmp(&l_suspend);
   2052       __ bind(&l_continuation);
   2053       __ jmp(&l_resume);
   2054       __ bind(&l_suspend);
   2055       const int generator_object_depth = kPointerSize + handler_size;
   2056       __ movq(rax, Operand(rsp, generator_object_depth));
   2057       __ push(rax);                                      // g
   2058       ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
   2059       __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
   2060               Smi::FromInt(l_continuation.pos()));
   2061       __ movq(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
   2062       __ movq(rcx, rsi);
   2063       __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
   2064                           kDontSaveFPRegs);
   2065       __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
   2066       __ movq(context_register(),
   2067               Operand(rbp, StandardFrameConstants::kContextOffset));
   2068       __ pop(rax);                                       // result
   2069       EmitReturnSequence();
   2070       __ bind(&l_resume);                                // received in rax
   2071       __ PopTryHandler();
   2072 
   2073       // receiver = iter; f = 'next'; arg = received;
   2074       __ bind(&l_next);
   2075       __ LoadRoot(rcx, Heap::knext_stringRootIndex);     // "next"
   2076       __ push(rcx);
   2077       __ push(Operand(rsp, 2 * kPointerSize));           // iter
   2078       __ push(rax);                                      // received
   2079 
   2080       // result = receiver[f](arg);
   2081       __ bind(&l_call);
   2082       Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(1);
   2083       CallIC(ic);
   2084       __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   2085       __ Drop(1);  // The key is still on the stack; drop it.
   2086 
   2087       // if (!result.done) goto l_try;
   2088       __ bind(&l_loop);
   2089       __ push(rax);                                      // save result
   2090       __ LoadRoot(rcx, Heap::kdone_stringRootIndex);     // "done"
   2091       Handle<Code> done_ic = isolate()->builtins()->LoadIC_Initialize();
   2092       CallIC(done_ic);                                   // result.done in rax
   2093       Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
   2094       CallIC(bool_ic);
   2095       __ testq(result_register(), result_register());
   2096       __ j(zero, &l_try);
   2097 
   2098       // result.value
   2099       __ pop(rax);                                       // result
   2100       __ LoadRoot(rcx, Heap::kvalue_stringRootIndex);    // "value"
   2101       Handle<Code> value_ic = isolate()->builtins()->LoadIC_Initialize();
   2102       CallIC(value_ic);                                  // result.value in rax
   2103       context()->DropAndPlug(2, rax);                    // drop iter and g
   2104       break;
   2105     }
   2106   }
   2107 }
   2108 
   2109 
   2110 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
   2111     Expression *value,
   2112     JSGeneratorObject::ResumeMode resume_mode) {
   2113   // The value stays in rax, and is ultimately read by the resumed generator, as
   2114   // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it.  rbx
   2115   // will hold the generator object until the activation has been resumed.
   2116   VisitForStackValue(generator);
   2117   VisitForAccumulatorValue(value);
   2118   __ pop(rbx);
   2119 
   2120   // Check generator state.
   2121   Label wrong_state, done;
   2122   STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
   2123   STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
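          // Suspended generators store a positive continuation offset, so any
          // value <= 0 means the generator is already running or closed.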
   2124   __ SmiCompare(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
   2125                 Smi::FromInt(0));
   2126   __ j(less_equal, &wrong_state);
   2127 
   2128   // Load suspended function and context.
   2129   __ movq(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
   2130   __ movq(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
   2131 
   2132   // Push receiver.
   2133   __ push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));
   2134 
   2135   // Push holes for arguments to generator function.
   2136   __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
   2137   __ movsxlq(rdx,
   2138              FieldOperand(rdx,
   2139                           SharedFunctionInfo::kFormalParameterCountOffset));
   2140   __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
   2141   Label push_argument_holes, push_frame;
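          // rdx holds the formal parameter count; subtracting past zero sets
          // the carry (borrow) flag and terminates the loop.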
   2142   __ bind(&push_argument_holes);
   2143   __ subq(rdx, Immediate(1));
   2144   __ j(carry, &push_frame);
   2145   __ push(rcx);
   2146   __ jmp(&push_argument_holes);
   2147 
   2148   // Enter a new JavaScript frame, and initialize its slots as they were when
   2149   // the generator was suspended.
   2150   Label resume_frame;
   2151   __ bind(&push_frame);
   2152   __ call(&resume_frame);
   2153   __ jmp(&done);
   2154   __ bind(&resume_frame);
   2155   __ push(rbp);  // Caller's frame pointer.
   2156   __ movq(rbp, rsp);
   2157   __ push(rsi);  // Callee's context.
   2158   __ push(rdi);  // Callee's JS Function.
   2159 
   2160   // Load the operand stack size.
   2161   __ movq(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
   2162   __ movq(rdx, FieldOperand(rdx, FixedArray::kLengthOffset));
   2163   __ SmiToInteger32(rdx, rdx);
   2164 
   2165   // If we are sending a value and there is no operand stack, we can jump back
   2166   // in directly.
   2167   if (resume_mode == JSGeneratorObject::NEXT) {
   2168     Label slow_resume;
   2169     __ cmpq(rdx, Immediate(0));
   2170     __ j(not_zero, &slow_resume);
   2171     __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
   2172     __ SmiToInteger64(rcx,
   2173         FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
   2174     __ addq(rdx, rcx);
   2175     __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
   2176             Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
   2177     __ jmp(rdx);
   2178     __ bind(&slow_resume);
   2179   }
   2180 
   2181   // Otherwise, we push holes for the operand stack and call the runtime to fix
   2182   // up the stack and the handlers.
   2183   Label push_operand_holes, call_resume;
   2184   __ bind(&push_operand_holes);
   2185   __ subq(rdx, Immediate(1));
   2186   __ j(carry, &call_resume);
   2187   __ push(rcx);
   2188   __ jmp(&push_operand_holes);
   2189   __ bind(&call_resume);
   2190   __ push(rbx);
   2191   __ push(result_register());
   2192   __ Push(Smi::FromInt(resume_mode));
   2193   __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
   2194   // Not reached: the runtime call returns elsewhere.
   2195   __ Abort(kGeneratorFailedToResume);
   2196 
   2197   // Throw error if we attempt to operate on a running generator.
   2198   __ bind(&wrong_state);
   2199   __ push(rbx);
   2200   __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
   2201 
   2202   __ bind(&done);
   2203   context()->Plug(result_register());
   2204 }
   2205 
   2206 
   2207 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
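          // Builds an iterator result object, roughly
          //   { value: <popped from the stack>, done: <the 'done' argument> },
          // and leaves it in rax.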
   2208   Label gc_required;
   2209   Label allocated;
   2210 
   2211   Handle<Map> map(isolate()->native_context()->generator_result_map());
   2212 
   2213   __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT);
   2214   __ jmp(&allocated);
   2215 
   2216   __ bind(&gc_required);
   2217   __ Push(Smi::FromInt(map->instance_size()));
   2218   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
   2219   __ movq(context_register(),
   2220           Operand(rbp, StandardFrameConstants::kContextOffset));
   2221 
   2222   __ bind(&allocated);
   2223   __ Move(rbx, map);
   2224   __ pop(rcx);
   2225   __ Move(rdx, isolate()->factory()->ToBoolean(done));
   2226   ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
   2227   __ movq(FieldOperand(rax, HeapObject::kMapOffset), rbx);
   2228   __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
   2229           isolate()->factory()->empty_fixed_array());
   2230   __ Move(FieldOperand(rax, JSObject::kElementsOffset),
   2231           isolate()->factory()->empty_fixed_array());
   2232   __ movq(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset),
   2233           rcx);
   2234   __ movq(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset),
   2235           rdx);
   2236 
   2237   // Only the value field needs a write barrier, as the other values are in the
   2238   // root set.
   2239   __ RecordWriteField(rax, JSGeneratorObject::kResultValuePropertyOffset,
   2240                       rcx, rdx, kDontSaveFPRegs);
   2241 }
   2242 
   2243 
   2244 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   2245   SetSourcePosition(prop->position());
   2246   Literal* key = prop->key()->AsLiteral();
   2247   __ Move(rcx, key->value());
   2248   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   2249   CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
   2250 }
   2251 
   2252 
   2253 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   2254   SetSourcePosition(prop->position());
   2255   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
   2256   CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
   2257 }
   2258 
   2259 
   2260 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
   2261                                               Token::Value op,
   2262                                               OverwriteMode mode,
   2263                                               Expression* left,
   2264                                               Expression* right) {
   2265   // Do combined smi check of the operands. Left operand is on the
   2266   // stack (popped into rdx). Right operand is in rax but moved into
   2267   // rcx to make the shifts easier.
   2268   Label done, stub_call, smi_case;
   2269   __ pop(rdx);
   2270   __ movq(rcx, rax);
   2271   __ or_(rax, rdx);
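          // A single smi test on the OR of both operands checks them at once;
          // rax is clobbered here, but the right operand survives in rcx.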
   2272   JumpPatchSite patch_site(masm_);
   2273   patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
   2274 
   2275   __ bind(&stub_call);
   2276   __ movq(rax, rcx);
   2277   BinaryOpStub stub(op, mode);
   2278   CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
   2279          expr->BinaryOperationFeedbackId());
   2280   patch_site.EmitPatchInfo();
   2281   __ jmp(&done, Label::kNear);
   2282 
   2283   __ bind(&smi_case);
   2284   switch (op) {
   2285     case Token::SAR:
   2286       __ SmiShiftArithmeticRight(rax, rdx, rcx);
   2287       break;
   2288     case Token::SHL:
   2289       __ SmiShiftLeft(rax, rdx, rcx);
   2290       break;
   2291     case Token::SHR:
   2292       __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
   2293       break;
   2294     case Token::ADD:
   2295       __ SmiAdd(rax, rdx, rcx, &stub_call);
   2296       break;
   2297     case Token::SUB:
   2298       __ SmiSub(rax, rdx, rcx, &stub_call);
   2299       break;
   2300     case Token::MUL:
   2301       __ SmiMul(rax, rdx, rcx, &stub_call);
   2302       break;
   2303     case Token::BIT_OR:
   2304       __ SmiOr(rax, rdx, rcx);
   2305       break;
   2306     case Token::BIT_AND:
   2307       __ SmiAnd(rax, rdx, rcx);
   2308       break;
   2309     case Token::BIT_XOR:
   2310       __ SmiXor(rax, rdx, rcx);
   2311       break;
   2312     default:
   2313       UNREACHABLE();
   2314       break;
   2315   }
   2316 
   2317   __ bind(&done);
   2318   context()->Plug(rax);
   2319 }
   2320 
   2321 
   2322 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
   2323                                      Token::Value op,
   2324                                      OverwriteMode mode) {
   2325   __ pop(rdx);
   2326   BinaryOpStub stub(op, mode);
   2327   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
   2328   CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
   2329          expr->BinaryOperationFeedbackId());
   2330   patch_site.EmitPatchInfo();
   2331   context()->Plug(rax);
   2332 }
   2333 
   2334 
   2335 void FullCodeGenerator::EmitAssignment(Expression* expr) {
   2336   // Invalid left-hand sides are rewritten by the parser to have a 'throw
   2337   // ReferenceError' on the left-hand side.
   2338   if (!expr->IsValidLeftHandSide()) {
   2339     VisitForEffect(expr);
   2340     return;
   2341   }
   2342 
   2343   // Left-hand side can only be a property, a global or a (parameter or local)
   2344   // slot.
   2345   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
   2346   LhsKind assign_type = VARIABLE;
   2347   Property* prop = expr->AsProperty();
   2348   if (prop != NULL) {
   2349     assign_type = (prop->key()->IsPropertyName())
   2350         ? NAMED_PROPERTY
   2351         : KEYED_PROPERTY;
   2352   }
   2353 
   2354   switch (assign_type) {
   2355     case VARIABLE: {
   2356       Variable* var = expr->AsVariableProxy()->var();
   2357       EffectContext context(this);
   2358       EmitVariableAssignment(var, Token::ASSIGN);
   2359       break;
   2360     }
   2361     case NAMED_PROPERTY: {
   2362       __ push(rax);  // Preserve value.
   2363       VisitForAccumulatorValue(prop->obj());
   2364       __ movq(rdx, rax);
   2365       __ pop(rax);  // Restore value.
   2366       __ Move(rcx, prop->key()->AsLiteral()->value());
   2367       Handle<Code> ic = is_classic_mode()
   2368           ? isolate()->builtins()->StoreIC_Initialize()
   2369           : isolate()->builtins()->StoreIC_Initialize_Strict();
   2370       CallIC(ic);
   2371       break;
   2372     }
   2373     case KEYED_PROPERTY: {
   2374       __ push(rax);  // Preserve value.
   2375       VisitForStackValue(prop->obj());
   2376       VisitForAccumulatorValue(prop->key());
   2377       __ movq(rcx, rax);
   2378       __ pop(rdx);
   2379       __ pop(rax);  // Restore value.
   2380       Handle<Code> ic = is_classic_mode()
   2381           ? isolate()->builtins()->KeyedStoreIC_Initialize()
   2382           : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
   2383       CallIC(ic);
   2384       break;
   2385     }
   2386   }
   2387   context()->Plug(rax);
   2388 }
   2389 
   2390 
   2391 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
   2392                                                Token::Value op) {
   2393   if (var->IsUnallocated()) {
   2394     // Global var, const, or let.
   2395     __ Move(rcx, var->name());
   2396     __ movq(rdx, GlobalObjectOperand());
   2397     Handle<Code> ic = is_classic_mode()
   2398         ? isolate()->builtins()->StoreIC_Initialize()
   2399         : isolate()->builtins()->StoreIC_Initialize_Strict();
   2400     CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
   2401   } else if (op == Token::INIT_CONST) {
   2402     // Const initializers need a write barrier.
   2403     ASSERT(!var->IsParameter());  // No const parameters.
   2404     if (var->IsStackLocal()) {
   2405       Label skip;
   2406       __ movq(rdx, StackOperand(var));
   2407       __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
   2408       __ j(not_equal, &skip);
   2409       __ movq(StackOperand(var), rax);
   2410       __ bind(&skip);
   2411     } else {
   2412       ASSERT(var->IsContextSlot() || var->IsLookupSlot());
   2413       // Like var declarations, const declarations are hoisted to function
   2414       // scope.  However, unlike var initializers, const initializers are
   2415       // able to drill a hole to that function context, even from inside a
   2416       // 'with' context.  We thus bypass the normal static scope lookup for
   2417       // var->IsContextSlot().
   2418       __ push(rax);
   2419       __ push(rsi);
   2420       __ Push(var->name());
   2421       __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
   2422     }
   2423 
   2424   } else if (var->mode() == LET && op != Token::INIT_LET) {
   2425     // Non-initializing assignment to let variable needs a write barrier.
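            // Illustrative example: in
            //   { f(); let x = 1; function f() { x = 2; } }
            // f runs while x still holds the hole, so the store must throw a
            // ReferenceError rather than assign.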
   2426     if (var->IsLookupSlot()) {
   2427       __ push(rax);  // Value.
   2428       __ push(rsi);  // Context.
   2429       __ Push(var->name());
   2430       __ Push(Smi::FromInt(language_mode()));
   2431       __ CallRuntime(Runtime::kStoreContextSlot, 4);
   2432     } else {
   2433       ASSERT(var->IsStackAllocated() || var->IsContextSlot());
   2434       Label assign;
   2435       MemOperand location = VarOperand(var, rcx);
   2436       __ movq(rdx, location);
   2437       __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
   2438       __ j(not_equal, &assign, Label::kNear);
   2439       __ Push(var->name());
   2440       __ CallRuntime(Runtime::kThrowReferenceError, 1);
   2441       __ bind(&assign);
   2442       __ movq(location, rax);
   2443       if (var->IsContextSlot()) {
   2444         __ movq(rdx, rax);
   2445         __ RecordWriteContextSlot(
   2446             rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
   2447       }
   2448     }
   2449 
   2450   } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
   2451     // Assignment to var or initializing assignment to let/const
   2452     // in harmony mode.
   2453     if (var->IsStackAllocated() || var->IsContextSlot()) {
   2454       MemOperand location = VarOperand(var, rcx);
   2455       if (generate_debug_code_ && op == Token::INIT_LET) {
   2456         // Check for an uninitialized let binding.
   2457         __ movq(rdx, location);
   2458         __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
   2459         __ Check(equal, kLetBindingReInitialization);
   2460       }
   2461       // Perform the assignment.
   2462       __ movq(location, rax);
   2463       if (var->IsContextSlot()) {
   2464         __ movq(rdx, rax);
   2465         __ RecordWriteContextSlot(
   2466             rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
   2467       }
   2468     } else {
   2469       ASSERT(var->IsLookupSlot());
   2470       __ push(rax);  // Value.
   2471       __ push(rsi);  // Context.
   2472       __ Push(var->name());
   2473       __ Push(Smi::FromInt(language_mode()));
   2474       __ CallRuntime(Runtime::kStoreContextSlot, 4);
   2475     }
   2476   }
   2477   // Non-initializing assignments to consts are ignored.
   2478 }
   2479 
   2480 
   2481 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
   2482   // Assignment to a property, using a named store IC.
   2483   Property* prop = expr->target()->AsProperty();
   2484   ASSERT(prop != NULL);
   2485   ASSERT(prop->key()->AsLiteral() != NULL);
   2486 
   2487   // Record source code position before IC call.
   2488   SetSourcePosition(expr->position());
   2489   __ Move(rcx, prop->key()->AsLiteral()->value());
   2490   __ pop(rdx);
   2491   Handle<Code> ic = is_classic_mode()
   2492       ? isolate()->builtins()->StoreIC_Initialize()
   2493       : isolate()->builtins()->StoreIC_Initialize_Strict();
   2494   CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
   2495 
   2496   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   2497   context()->Plug(rax);
   2498 }
   2499 
   2500 
   2501 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   2502   // Assignment to a property, using a keyed store IC.
   2503 
   2504   __ pop(rcx);
   2505   __ pop(rdx);
   2506   // Record source code position before IC call.
   2507   SetSourcePosition(expr->position());
   2508   Handle<Code> ic = is_classic_mode()
   2509       ? isolate()->builtins()->KeyedStoreIC_Initialize()
   2510       : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
   2511   CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
   2512 
   2513   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   2514   context()->Plug(rax);
   2515 }
   2516 
   2517 
   2518 void FullCodeGenerator::VisitProperty(Property* expr) {
   2519   Comment cmnt(masm_, "[ Property");
   2520   Expression* key = expr->key();
   2521 
   2522   if (key->IsPropertyName()) {
   2523     VisitForAccumulatorValue(expr->obj());
   2524     EmitNamedPropertyLoad(expr);
   2525     PrepareForBailoutForId(expr->LoadId(), TOS_REG);
   2526     context()->Plug(rax);
   2527   } else {
   2528     VisitForStackValue(expr->obj());
   2529     VisitForAccumulatorValue(expr->key());
   2530     __ pop(rdx);
   2531     EmitKeyedPropertyLoad(expr);
   2532     context()->Plug(rax);
   2533   }
   2534 }
   2535 
   2536 
   2537 void FullCodeGenerator::CallIC(Handle<Code> code,
   2538                                RelocInfo::Mode rmode,
   2539                                TypeFeedbackId ast_id) {
   2540   ic_total_count_++;
   2541   __ call(code, rmode, ast_id);
   2542 }
   2543 
   2544 
   2545 void FullCodeGenerator::EmitCallWithIC(Call* expr,
   2546                                        Handle<Object> name,
   2547                                        RelocInfo::Mode mode) {
   2548   // Code common for calls using the IC.
   2549   ZoneList<Expression*>* args = expr->arguments();
   2550   int arg_count = args->length();
   2551   { PreservePositionScope scope(masm()->positions_recorder());
   2552     for (int i = 0; i < arg_count; i++) {
   2553       VisitForStackValue(args->at(i));
   2554     }
   2555     __ Move(rcx, name);
   2556   }
   2557   // Record source position for debugger.
   2558   SetSourcePosition(expr->position());
   2559   // Call the IC initialization code.
   2560   Handle<Code> ic =
   2561       isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
   2562   CallIC(ic, mode, expr->CallFeedbackId());
   2563   RecordJSReturnSite(expr);
   2564   // Restore context register.
   2565   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   2566   context()->Plug(rax);
   2567 }
   2568 
   2569 
   2570 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
   2571                                             Expression* key) {
   2572   // Load the key.
   2573   VisitForAccumulatorValue(key);
   2574 
   2575   // Swap the name of the function and the receiver on the stack to follow
   2576   // the calling convention for call ICs.
   2577   __ pop(rcx);
   2578   __ push(rax);
   2579   __ push(rcx);
   2580 
   2581   // Load the arguments.
   2582   ZoneList<Expression*>* args = expr->arguments();
   2583   int arg_count = args->length();
   2584   { PreservePositionScope scope(masm()->positions_recorder());
   2585     for (int i = 0; i < arg_count; i++) {
   2586       VisitForStackValue(args->at(i));
   2587     }
   2588   }
   2589   // Record source position for debugger.
   2590   SetSourcePosition(expr->position());
   2591   // Call the IC initialization code.
   2592   Handle<Code> ic =
   2593       isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
   2594   __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize));  // Key.
   2595   CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
   2596   RecordJSReturnSite(expr);
   2597   // Restore context register.
   2598   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   2599   context()->DropAndPlug(1, rax);  // Drop the key still on the stack.
   2600 }
   2601 
   2602 
   2603 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
   2604   // Code common for calls using the call stub.
   2605   ZoneList<Expression*>* args = expr->arguments();
   2606   int arg_count = args->length();
   2607   { PreservePositionScope scope(masm()->positions_recorder());
   2608     for (int i = 0; i < arg_count; i++) {
   2609       VisitForStackValue(args->at(i));
   2610     }
   2611   }
   2612   // Record source position for debugger.
   2613   SetSourcePosition(expr->position());
   2614 
   2615   // Record call targets in unoptimized code.
   2616   flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
   2617   Handle<Object> uninitialized =
   2618       TypeFeedbackCells::UninitializedSentinel(isolate());
   2619   Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
   2620   RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
   2621   __ Move(rbx, cell);
   2622 
   2623   CallFunctionStub stub(arg_count, flags);
   2624   __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
   2625   __ CallStub(&stub, expr->CallFeedbackId());
   2626   RecordJSReturnSite(expr);
   2627   // Restore context register.
   2628   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   2629   // Discard the function left on TOS.
   2630   context()->DropAndPlug(1, rax);
   2631 }
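         // rbx carries the feedback Cell allocated above; with RECORD_CALL_TARGET
         // set, CallFunctionStub uses it to remember the function called from this
         // site, feedback that later optimization passes can read. The callee is
         // loaded into rdi from just below the arguments on the stack.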
   2632 
   2633 
   2634 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
    2635   // Push a copy of the first argument, or undefined if it doesn't exist.
   2636   if (arg_count > 0) {
   2637     __ push(Operand(rsp, arg_count * kPointerSize));
   2638   } else {
   2639     __ PushRoot(Heap::kUndefinedValueRootIndex);
   2640   }
   2641 
   2642   // Push the receiver of the enclosing function and do runtime call.
   2643   __ push(Operand(rbp, (2 + info_->scope()->num_parameters()) * kPointerSize));
   2644 
   2645   // Push the language mode.
   2646   __ Push(Smi::FromInt(language_mode()));
   2647 
    2648   // Push the start position of the scope the call resides in.
   2649   __ Push(Smi::FromInt(scope()->start_position()));
   2650 
   2651   // Do the runtime call.
   2652   __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
   2653 }
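         // In push order, the five arguments to %ResolvePossiblyDirectEval are:
         // the function to call (pushed by the caller just before this helper),
         // a copy of the first argument or undefined, the enclosing function's
         // receiver, the language mode, and the scope's start position.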
   2654 
   2655 
   2656 void FullCodeGenerator::VisitCall(Call* expr) {
   2657 #ifdef DEBUG
   2658   // We want to verify that RecordJSReturnSite gets called on all paths
   2659   // through this function.  Avoid early returns.
   2660   expr->return_is_recorded_ = false;
   2661 #endif
   2662 
   2663   Comment cmnt(masm_, "[ Call");
   2664   Expression* callee = expr->expression();
   2665   VariableProxy* proxy = callee->AsVariableProxy();
   2666   Property* property = callee->AsProperty();
   2667 
   2668   if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
   2669     // In a call to eval, we first call %ResolvePossiblyDirectEval to
   2670     // resolve the function we need to call and the receiver of the call.
   2671     // Then we call the resolved function using the given arguments.
   2672     ZoneList<Expression*>* args = expr->arguments();
   2673     int arg_count = args->length();
   2674     { PreservePositionScope pos_scope(masm()->positions_recorder());
   2675       VisitForStackValue(callee);
   2676       __ PushRoot(Heap::kUndefinedValueRootIndex);  // Reserved receiver slot.
   2677 
   2678       // Push the arguments.
   2679       for (int i = 0; i < arg_count; i++) {
   2680         VisitForStackValue(args->at(i));
   2681       }
   2682 
   2683       // Push a copy of the function (found below the arguments) and resolve
   2684       // eval.
   2685       __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
   2686       EmitResolvePossiblyDirectEval(arg_count);
   2687 
   2688       // The runtime call returns a pair of values in rax (function) and
   2689       // rdx (receiver). Touch up the stack with the right values.
   2690       __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
   2691       __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
   2692     }
   2693     // Record source position for debugger.
   2694     SetSourcePosition(expr->position());
   2695     CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
   2696     __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
   2697     __ CallStub(&stub);
   2698     RecordJSReturnSite(expr);
   2699     // Restore context register.
   2700     __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   2701     context()->DropAndPlug(1, rax);
   2702   } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
   2703     // Call to a global variable.  Push global object as receiver for the
   2704     // call IC lookup.
   2705     __ push(GlobalObjectOperand());
   2706     EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
   2707   } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
   2708     // Call to a lookup slot (dynamically introduced variable).
   2709     Label slow, done;
   2710 
   2711     { PreservePositionScope scope(masm()->positions_recorder());
   2712       // Generate code for loading from variables potentially shadowed by
   2713       // eval-introduced variables.
   2714       EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
   2715     }
   2716     __ bind(&slow);
   2717     // Call the runtime to find the function to call (returned in rax) and
   2718     // the object holding it (returned in rdx).
   2719     __ push(context_register());
   2720     __ Push(proxy->name());
   2721     __ CallRuntime(Runtime::kLoadContextSlot, 2);
   2722     __ push(rax);  // Function.
   2723     __ push(rdx);  // Receiver.
   2724 
   2725     // If fast case code has been generated, emit code to push the function
   2726     // and receiver and have the slow path jump around this code.
   2727     if (done.is_linked()) {
   2728       Label call;
   2729       __ jmp(&call, Label::kNear);
   2730       __ bind(&done);
   2731       // Push function.
   2732       __ push(rax);
   2733       // The receiver is implicitly the global receiver. Indicate this by
   2734       // passing the hole to the call function stub.
   2735       __ PushRoot(Heap::kTheHoleValueRootIndex);
   2736       __ bind(&call);
   2737     }
   2738 
   2739     // The receiver is either the global receiver or an object found by
   2740     // LoadContextSlot. That object could be the hole if the receiver is
   2741     // implicitly the global object.
   2742     EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
   2743   } else if (property != NULL) {
   2744     { PreservePositionScope scope(masm()->positions_recorder());
   2745       VisitForStackValue(property->obj());
   2746     }
   2747     if (property->key()->IsPropertyName()) {
   2748       EmitCallWithIC(expr,
   2749                      property->key()->AsLiteral()->value(),
   2750                      RelocInfo::CODE_TARGET);
   2751     } else {
   2752       EmitKeyedCallWithIC(expr, property->key());
   2753     }
   2754   } else {
   2755     // Call to an arbitrary expression not handled specially above.
   2756     { PreservePositionScope scope(masm()->positions_recorder());
   2757       VisitForStackValue(callee);
   2758     }
   2759     // Load global receiver object.
   2760     __ movq(rbx, GlobalObjectOperand());
   2761     __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
   2762     // Emit function call.
   2763     EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
   2764   }
   2765 
   2766 #ifdef DEBUG
   2767   // RecordJSReturnSite should have been called.
   2768   ASSERT(expr->return_is_recorded_);
   2769 #endif
   2770 }
   2771 
   2772 
   2773 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   2774   Comment cmnt(masm_, "[ CallNew");
   2775   // According to ECMA-262, section 11.2.2, page 44, the function
   2776   // expression in new calls must be evaluated before the
   2777   // arguments.
   2778 
    2779   // Push the constructor on the stack.  If it's not a function, it's used
    2780   // as the receiver for CALL_NON_FUNCTION; otherwise the value on the
    2781   // stack is ignored.
   2782   VisitForStackValue(expr->expression());
   2783 
   2784   // Push the arguments ("left-to-right") on the stack.
   2785   ZoneList<Expression*>* args = expr->arguments();
   2786   int arg_count = args->length();
   2787   for (int i = 0; i < arg_count; i++) {
   2788     VisitForStackValue(args->at(i));
   2789   }
   2790 
   2791   // Call the construct call builtin that handles allocation and
   2792   // constructor invocation.
   2793   SetSourcePosition(expr->position());
   2794 
   2795   // Load function and argument count into rdi and rax.
   2796   __ Set(rax, arg_count);
   2797   __ movq(rdi, Operand(rsp, arg_count * kPointerSize));
   2798 
   2799   // Record call targets in unoptimized code, but not in the snapshot.
   2800   Handle<Object> uninitialized =
   2801       TypeFeedbackCells::UninitializedSentinel(isolate());
   2802   Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
   2803   RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
   2804   __ Move(rbx, cell);
   2805 
   2806   CallConstructStub stub(RECORD_CALL_TARGET);
   2807   __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
   2808   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   2809   context()->Plug(rax);
   2810 }
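         // Register convention for the construct stub, as set up above: rax holds
         // the untagged argument count, rdi the constructor (reloaded from below
         // the arguments), and rbx the type feedback cell recorded for this site.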
   2811 
   2812 
   2813 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
   2814   ZoneList<Expression*>* args = expr->arguments();
   2815   ASSERT(args->length() == 1);
   2816 
   2817   VisitForAccumulatorValue(args->at(0));
   2818 
   2819   Label materialize_true, materialize_false;
   2820   Label* if_true = NULL;
   2821   Label* if_false = NULL;
   2822   Label* fall_through = NULL;
   2823   context()->PrepareTest(&materialize_true, &materialize_false,
   2824                          &if_true, &if_false, &fall_through);
   2825 
   2826   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2827   __ JumpIfSmi(rax, if_true);
   2828   __ jmp(if_false);
   2829 
   2830   context()->Plug(if_true, if_false);
   2831 }
   2832 
   2833 
   2834 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
   2835   ZoneList<Expression*>* args = expr->arguments();
   2836   ASSERT(args->length() == 1);
   2837 
   2838   VisitForAccumulatorValue(args->at(0));
   2839 
   2840   Label materialize_true, materialize_false;
   2841   Label* if_true = NULL;
   2842   Label* if_false = NULL;
   2843   Label* fall_through = NULL;
   2844   context()->PrepareTest(&materialize_true, &materialize_false,
   2845                          &if_true, &if_false, &fall_through);
   2846 
   2847   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2848   Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
   2849   Split(non_negative_smi, if_true, if_false, fall_through);
   2850 
   2851   context()->Plug(if_true, if_false);
   2852 }
   2853 
   2854 
   2855 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
   2856   ZoneList<Expression*>* args = expr->arguments();
   2857   ASSERT(args->length() == 1);
   2858 
   2859   VisitForAccumulatorValue(args->at(0));
   2860 
   2861   Label materialize_true, materialize_false;
   2862   Label* if_true = NULL;
   2863   Label* if_false = NULL;
   2864   Label* fall_through = NULL;
   2865   context()->PrepareTest(&materialize_true, &materialize_false,
   2866                          &if_true, &if_false, &fall_through);
   2867 
   2868   __ JumpIfSmi(rax, if_false);
   2869   __ CompareRoot(rax, Heap::kNullValueRootIndex);
   2870   __ j(equal, if_true);
   2871   __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
   2872   // Undetectable objects behave like undefined when tested with typeof.
   2873   __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
   2874            Immediate(1 << Map::kIsUndetectable));
   2875   __ j(not_zero, if_false);
   2876   __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
   2877   __ cmpq(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
   2878   __ j(below, if_false);
   2879   __ cmpq(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
   2880   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2881   Split(below_equal, if_true, if_false, fall_through);
   2882 
   2883   context()->Plug(if_true, if_false);
   2884 }
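         // In effect, the sequence above computes: false for smis and for
         // undetectable objects, true for null, and otherwise true exactly when
         // the instance type lies in the non-callable JS object range
         // [FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, LAST_NONCALLABLE_SPEC_OBJECT_TYPE].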
   2885 
   2886 
   2887 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
   2888   ZoneList<Expression*>* args = expr->arguments();
   2889   ASSERT(args->length() == 1);
   2890 
   2891   VisitForAccumulatorValue(args->at(0));
   2892 
   2893   Label materialize_true, materialize_false;
   2894   Label* if_true = NULL;
   2895   Label* if_false = NULL;
   2896   Label* fall_through = NULL;
   2897   context()->PrepareTest(&materialize_true, &materialize_false,
   2898                          &if_true, &if_false, &fall_through);
   2899 
   2900   __ JumpIfSmi(rax, if_false);
   2901   __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
   2902   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2903   Split(above_equal, if_true, if_false, fall_through);
   2904 
   2905   context()->Plug(if_true, if_false);
   2906 }
   2907 
   2908 
   2909 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
   2910   ZoneList<Expression*>* args = expr->arguments();
   2911   ASSERT(args->length() == 1);
   2912 
   2913   VisitForAccumulatorValue(args->at(0));
   2914 
   2915   Label materialize_true, materialize_false;
   2916   Label* if_true = NULL;
   2917   Label* if_false = NULL;
   2918   Label* fall_through = NULL;
   2919   context()->PrepareTest(&materialize_true, &materialize_false,
   2920                          &if_true, &if_false, &fall_through);
   2921 
   2922   __ JumpIfSmi(rax, if_false);
   2923   __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
   2924   __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
   2925            Immediate(1 << Map::kIsUndetectable));
   2926   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   2927   Split(not_zero, if_true, if_false, fall_through);
   2928 
   2929   context()->Plug(if_true, if_false);
   2930 }
   2931 
   2932 
   2933 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
   2934     CallRuntime* expr) {
   2935   ZoneList<Expression*>* args = expr->arguments();
   2936   ASSERT(args->length() == 1);
   2937 
   2938   VisitForAccumulatorValue(args->at(0));
   2939 
   2940   Label materialize_true, materialize_false;
   2941   Label* if_true = NULL;
   2942   Label* if_false = NULL;
   2943   Label* fall_through = NULL;
   2944   context()->PrepareTest(&materialize_true, &materialize_false,
   2945                          &if_true, &if_false, &fall_through);
   2946 
   2947   __ AssertNotSmi(rax);
   2948 
   2949   // Check whether this map has already been checked to be safe for default
   2950   // valueOf.
   2951   __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
   2952   __ testb(FieldOperand(rbx, Map::kBitField2Offset),
   2953            Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
   2954   __ j(not_zero, if_true);
   2955 
   2956   // Check for fast case object. Generate false result for slow case object.
   2957   __ movq(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
   2958   __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
   2959   __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
   2960   __ j(equal, if_false);
   2961 
    2962   // Look for the "valueOf" string in the descriptor array, and indicate
    2963   // false if found. Since we omit an enumeration index check, a "valueOf"
    2964   // added via a transition that shares its descriptor array is a false
    2965   // positive here.
   2965   Label entry, loop, done;
   2966 
   2967   // Skip loop if no descriptors are valid.
   2968   __ NumberOfOwnDescriptors(rcx, rbx);
   2969   __ cmpq(rcx, Immediate(0));
   2970   __ j(equal, &done);
   2971 
   2972   __ LoadInstanceDescriptors(rbx, rbx);
   2973   // rbx: descriptor array.
   2974   // rcx: valid entries in the descriptor array.
   2975   // Calculate the end of the descriptor array.
   2976   __ imul(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
   2977   SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
   2978   __ lea(rcx,
   2979          Operand(
   2980              rbx, index.reg, index.scale, DescriptorArray::kFirstOffset));
   2981   // Calculate location of the first key name.
   2982   __ addq(rbx, Immediate(DescriptorArray::kFirstOffset));
    2983   // Loop through all the keys in the descriptor array. If one of these is
    2984   // the internalized string "valueOf", the result is false.
   2985   __ jmp(&entry);
   2986   __ bind(&loop);
   2987   __ movq(rdx, FieldOperand(rbx, 0));
   2988   __ Cmp(rdx, isolate()->factory()->value_of_string());
   2989   __ j(equal, if_false);
   2990   __ addq(rbx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
   2991   __ bind(&entry);
   2992   __ cmpq(rbx, rcx);
   2993   __ j(not_equal, &loop);
   2994 
   2995   __ bind(&done);
    2996   // Reload the map, as register rbx was used as a temporary above.
   2997   __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
   2998 
    2999   // If a valueOf property is not found on the object, check that its
    3000   // prototype is the unmodified String prototype. If it is not, the result
    3001   // is false.
   3001   __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
   3002   __ testq(rcx, Immediate(kSmiTagMask));
   3003   __ j(zero, if_false);
   3004   __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
   3005   __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   3006   __ movq(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
   3007   __ cmpq(rcx,
   3008           ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
   3009   __ j(not_equal, if_false);
   3010   // Set the bit in the map to indicate that it has been checked safe for
   3011   // default valueOf and set true result.
   3012   __ or_(FieldOperand(rbx, Map::kBitField2Offset),
   3013          Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
   3014   __ jmp(if_true);
   3015 
   3016   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3017   context()->Plug(if_true, if_false);
   3018 }
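         // Summary of the check above: a string wrapper map is "safe" if none of
         // its own descriptors is named "valueOf" and its prototype is the
         // unmodified String prototype from the native context. On success the
         // answer is cached by setting the map bit tested at the top, so the
         // descriptor walk runs at most once per map.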
   3019 
   3020 
   3021 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
   3022   ZoneList<Expression*>* args = expr->arguments();
   3023   ASSERT(args->length() == 1);
   3024 
   3025   VisitForAccumulatorValue(args->at(0));
   3026 
   3027   Label materialize_true, materialize_false;
   3028   Label* if_true = NULL;
   3029   Label* if_false = NULL;
   3030   Label* fall_through = NULL;
   3031   context()->PrepareTest(&materialize_true, &materialize_false,
   3032                          &if_true, &if_false, &fall_through);
   3033 
   3034   __ JumpIfSmi(rax, if_false);
   3035   __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
   3036   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3037   Split(equal, if_true, if_false, fall_through);
   3038 
   3039   context()->Plug(if_true, if_false);
   3040 }
   3041 
   3042 
   3043 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
   3044   ZoneList<Expression*>* args = expr->arguments();
   3045   ASSERT(args->length() == 1);
   3046 
   3047   VisitForAccumulatorValue(args->at(0));
   3048 
   3049   Label materialize_true, materialize_false;
   3050   Label* if_true = NULL;
   3051   Label* if_false = NULL;
   3052   Label* fall_through = NULL;
   3053   context()->PrepareTest(&materialize_true, &materialize_false,
   3054                          &if_true, &if_false, &fall_through);
   3055 
   3056   __ JumpIfSmi(rax, if_false);
   3057   __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
   3058   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3059   Split(equal, if_true, if_false, fall_through);
   3060 
   3061   context()->Plug(if_true, if_false);
   3062 }
   3063 
   3064 
   3065 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
   3066   ZoneList<Expression*>* args = expr->arguments();
   3067   ASSERT(args->length() == 1);
   3068 
   3069   VisitForAccumulatorValue(args->at(0));
   3070 
   3071   Label materialize_true, materialize_false;
   3072   Label* if_true = NULL;
   3073   Label* if_false = NULL;
   3074   Label* fall_through = NULL;
   3075   context()->PrepareTest(&materialize_true, &materialize_false,
   3076                          &if_true, &if_false, &fall_through);
   3077 
   3078   __ JumpIfSmi(rax, if_false);
   3079   __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
   3080   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3081   Split(equal, if_true, if_false, fall_through);
   3082 
   3083   context()->Plug(if_true, if_false);
   3084 }
   3085 
   3086 
   3088 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
   3089   ASSERT(expr->arguments()->length() == 0);
   3090 
   3091   Label materialize_true, materialize_false;
   3092   Label* if_true = NULL;
   3093   Label* if_false = NULL;
   3094   Label* fall_through = NULL;
   3095   context()->PrepareTest(&materialize_true, &materialize_false,
   3096                          &if_true, &if_false, &fall_through);
   3097 
   3098   // Get the frame pointer for the calling frame.
   3099   __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   3100 
   3101   // Skip the arguments adaptor frame if it exists.
   3102   Label check_frame_marker;
   3103   __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
   3104          Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   3105   __ j(not_equal, &check_frame_marker);
   3106   __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
   3107 
   3108   // Check the marker in the calling frame.
   3109   __ bind(&check_frame_marker);
   3110   __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
   3111          Smi::FromInt(StackFrame::CONSTRUCT));
   3112   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3113   Split(equal, if_true, if_false, fall_through);
   3114 
   3115   context()->Plug(if_true, if_false);
   3116 }
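         // The frame walk above: load the caller's frame pointer, skip one more
         // frame if its context slot holds the ARGUMENTS_ADAPTOR sentinel, then
         // report true exactly when the marker slot holds the CONSTRUCT marker.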
   3117 
   3118 
   3119 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
   3120   ZoneList<Expression*>* args = expr->arguments();
   3121   ASSERT(args->length() == 2);
   3122 
   3123   // Load the two objects into registers and perform the comparison.
   3124   VisitForStackValue(args->at(0));
   3125   VisitForAccumulatorValue(args->at(1));
   3126 
   3127   Label materialize_true, materialize_false;
   3128   Label* if_true = NULL;
   3129   Label* if_false = NULL;
   3130   Label* fall_through = NULL;
   3131   context()->PrepareTest(&materialize_true, &materialize_false,
   3132                          &if_true, &if_false, &fall_through);
   3133 
   3134   __ pop(rbx);
   3135   __ cmpq(rax, rbx);
   3136   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3137   Split(equal, if_true, if_false, fall_through);
   3138 
   3139   context()->Plug(if_true, if_false);
   3140 }
   3141 
   3142 
   3143 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
   3144   ZoneList<Expression*>* args = expr->arguments();
   3145   ASSERT(args->length() == 1);
   3146 
   3147   // ArgumentsAccessStub expects the key in rdx and the formal
   3148   // parameter count in rax.
   3149   VisitForAccumulatorValue(args->at(0));
   3150   __ movq(rdx, rax);
   3151   __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
   3152   ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
   3153   __ CallStub(&stub);
   3154   context()->Plug(rax);
   3155 }
   3156 
   3157 
   3158 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
   3159   ASSERT(expr->arguments()->length() == 0);
   3160 
   3161   Label exit;
   3162   // Get the number of formal parameters.
   3163   __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
   3164 
   3165   // Check if the calling frame is an arguments adaptor frame.
   3166   __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   3167   __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
   3168          Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   3169   __ j(not_equal, &exit, Label::kNear);
   3170 
   3171   // Arguments adaptor case: Read the arguments length from the
   3172   // adaptor frame.
   3173   __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
   3174 
   3175   __ bind(&exit);
   3176   __ AssertSmi(rax);
   3177   context()->Plug(rax);
   3178 }
   3179 
   3180 
   3181 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
   3182   ZoneList<Expression*>* args = expr->arguments();
   3183   ASSERT(args->length() == 1);
   3184   Label done, null, function, non_function_constructor;
   3185 
   3186   VisitForAccumulatorValue(args->at(0));
   3187 
   3188   // If the object is a smi, we return null.
   3189   __ JumpIfSmi(rax, &null);
   3190 
   3191   // Check that the object is a JS object but take special care of JS
   3192   // functions to make sure they have 'Function' as their class.
    3193   // Assume that there are only two callable types, one at each end of the
    3194   // type range for JS object types; this saves extra comparisons.
   3195   STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
   3196   __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
   3197   // Map is now in rax.
   3198   __ j(below, &null);
   3199   STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
   3200                 FIRST_SPEC_OBJECT_TYPE + 1);
   3201   __ j(equal, &function);
   3202 
   3203   __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
   3204   STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
   3205                 LAST_SPEC_OBJECT_TYPE - 1);
   3206   __ j(equal, &function);
   3207   // Assume that there is no larger type.
   3208   STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
   3209 
   3210   // Check if the constructor in the map is a JS function.
   3211   __ movq(rax, FieldOperand(rax, Map::kConstructorOffset));
   3212   __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
   3213   __ j(not_equal, &non_function_constructor);
   3214 
   3215   // rax now contains the constructor function. Grab the
   3216   // instance class name from there.
   3217   __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
   3218   __ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
   3219   __ jmp(&done);
   3220 
   3221   // Functions have class 'Function'.
   3222   __ bind(&function);
   3223   __ Move(rax, isolate()->factory()->function_class_string());
   3224   __ jmp(&done);
   3225 
   3226   // Objects with a non-function constructor have class 'Object'.
   3227   __ bind(&non_function_constructor);
   3228   __ Move(rax, isolate()->factory()->Object_string());
   3229   __ jmp(&done);
   3230 
   3231   // Non-JS objects have class null.
   3232   __ bind(&null);
   3233   __ LoadRoot(rax, Heap::kNullValueRootIndex);
   3234 
   3235   // All done.
   3236   __ bind(&done);
   3237 
   3238   context()->Plug(rax);
   3239 }
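         // The STATIC_ASSERTs above justify a small trick: since exactly two
         // callable types exist and they sit at the two ends of the spec-object
         // type range, the two equality checks against the range boundaries
         // suffice to classify every callable as class 'Function'.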
   3240 
   3241 
   3242 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
   3243   // Conditionally generate a log call.
   3244   // Args:
   3245   //   0 (literal string): The type of logging (corresponds to the flags).
   3246   //     This is used to determine whether or not to generate the log call.
   3247   //   1 (string): Format string.  Access the string at argument index 2
   3248   //     with '%2s' (see Logger::LogRuntime for all the formats).
   3249   //   2 (array): Arguments to the format string.
   3250   ZoneList<Expression*>* args = expr->arguments();
   3251   ASSERT_EQ(args->length(), 3);
   3252   if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
   3253     VisitForStackValue(args->at(1));
   3254     VisitForStackValue(args->at(2));
   3255     __ CallRuntime(Runtime::kLog, 2);
   3256   }
   3257   // Finally, we're expected to leave a value on the top of the stack.
   3258   __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
   3259   context()->Plug(rax);
   3260 }
   3261 
   3262 
   3263 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
   3264   ASSERT(expr->arguments()->length() == 0);
   3265 
   3266   Label slow_allocate_heapnumber;
   3267   Label heapnumber_allocated;
   3268 
   3269   __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
   3270   __ jmp(&heapnumber_allocated);
   3271 
   3272   __ bind(&slow_allocate_heapnumber);
   3273   // Allocate a heap number.
   3274   __ CallRuntime(Runtime::kNumberAlloc, 0);
   3275   __ movq(rbx, rax);
   3276 
   3277   __ bind(&heapnumber_allocated);
   3278 
   3279   // Return a random uint32 number in rax.
   3280   // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
   3281   __ PrepareCallCFunction(1);
   3282   __ movq(arg_reg_1,
   3283           ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
   3284   __ movq(arg_reg_1,
   3285           FieldOperand(arg_reg_1, GlobalObject::kNativeContextOffset));
   3286   __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
   3287 
   3288   // Convert 32 random bits in rax to 0.(32 random bits) in a double
   3289   // by computing:
    3290   // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
   3291   __ movl(rcx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
   3292   __ movd(xmm1, rcx);
   3293   __ movd(xmm0, rax);
   3294   __ cvtss2sd(xmm1, xmm1);
   3295   __ xorps(xmm0, xmm1);
   3296   __ subsd(xmm0, xmm1);
   3297   __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
   3298 
   3299   __ movq(rax, rbx);
   3300   context()->Plug(rax);
   3301 }
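         // Bit-level sketch of the conversion, writing r for the 32 random bits:
         // the double 1.0 * 2^20 has an all-zero low mantissa word, so the xorps
         // deposits r into the low 32 mantissa bits, giving
         //   xmm0 = (1 + r * 2^-52) * 2^20 = 2^20 + r * 2^-32,
         // and subtracting 2^20 leaves r * 2^-32, uniform in [0, 1).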
   3302 
   3303 
   3304 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
   3305   // Load the arguments on the stack and call the stub.
   3306   SubStringStub stub;
   3307   ZoneList<Expression*>* args = expr->arguments();
   3308   ASSERT(args->length() == 3);
   3309   VisitForStackValue(args->at(0));
   3310   VisitForStackValue(args->at(1));
   3311   VisitForStackValue(args->at(2));
   3312   __ CallStub(&stub);
   3313   context()->Plug(rax);
   3314 }
   3315 
   3316 
   3317 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
   3318   // Load the arguments on the stack and call the stub.
   3319   RegExpExecStub stub;
   3320   ZoneList<Expression*>* args = expr->arguments();
   3321   ASSERT(args->length() == 4);
   3322   VisitForStackValue(args->at(0));
   3323   VisitForStackValue(args->at(1));
   3324   VisitForStackValue(args->at(2));
   3325   VisitForStackValue(args->at(3));
   3326   __ CallStub(&stub);
   3327   context()->Plug(rax);
   3328 }
   3329 
   3330 
   3331 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
   3332   ZoneList<Expression*>* args = expr->arguments();
   3333   ASSERT(args->length() == 1);
   3334 
   3335   VisitForAccumulatorValue(args->at(0));  // Load the object.
   3336 
   3337   Label done;
   3338   // If the object is a smi return the object.
   3339   __ JumpIfSmi(rax, &done);
   3340   // If the object is not a value type, return the object.
   3341   __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
   3342   __ j(not_equal, &done);
   3343   __ movq(rax, FieldOperand(rax, JSValue::kValueOffset));
   3344 
   3345   __ bind(&done);
   3346   context()->Plug(rax);
   3347 }
   3348 
   3349 
   3350 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
   3351   ZoneList<Expression*>* args = expr->arguments();
   3352   ASSERT(args->length() == 2);
   3353   ASSERT_NE(NULL, args->at(1)->AsLiteral());
   3354   Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
   3355 
   3356   VisitForAccumulatorValue(args->at(0));  // Load the object.
   3357 
   3358   Label runtime, done, not_date_object;
   3359   Register object = rax;
   3360   Register result = rax;
   3361   Register scratch = rcx;
   3362 
   3363   __ JumpIfSmi(object, &not_date_object);
   3364   __ CmpObjectType(object, JS_DATE_TYPE, scratch);
   3365   __ j(not_equal, &not_date_object);
   3366 
   3367   if (index->value() == 0) {
   3368     __ movq(result, FieldOperand(object, JSDate::kValueOffset));
   3369     __ jmp(&done);
   3370   } else {
   3371     if (index->value() < JSDate::kFirstUncachedField) {
   3372       ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
   3373       Operand stamp_operand = __ ExternalOperand(stamp);
   3374       __ movq(scratch, stamp_operand);
   3375       __ cmpq(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
   3376       __ j(not_equal, &runtime, Label::kNear);
   3377       __ movq(result, FieldOperand(object, JSDate::kValueOffset +
   3378                                            kPointerSize * index->value()));
   3379       __ jmp(&done);
   3380     }
   3381     __ bind(&runtime);
   3382     __ PrepareCallCFunction(2);
    3383     __ movq(arg_reg_1, object);
    3384     __ movq(arg_reg_2, index, RelocInfo::NONE64);
   3385     __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
   3386     __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   3387     __ jmp(&done);
   3388   }
   3389 
   3390   __ bind(&not_date_object);
   3391   __ CallRuntime(Runtime::kThrowNotDateError, 0);
   3392   __ bind(&done);
   3393   context()->Plug(rax);
   3394 }
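         // Caching protocol used above: field 0 is the time value itself and is
         // always read directly; fields below JSDate::kFirstUncachedField are
         // cached on the JSDate object and stay valid while the object's cache
         // stamp matches the isolate's date_cache_stamp. Everything else goes
         // through the C++ date field function.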
   3395 
   3396 
   3397 void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
   3398                                                   Register index,
   3399                                                   Register value,
   3400                                                   uint32_t encoding_mask) {
   3401   __ Check(masm()->CheckSmi(index), kNonSmiIndex);
   3402   __ Check(masm()->CheckSmi(value), kNonSmiValue);
   3403 
   3404   __ SmiCompare(index, FieldOperand(string, String::kLengthOffset));
   3405   __ Check(less, kIndexIsTooLarge);
   3406 
   3407   __ SmiCompare(index, Smi::FromInt(0));
   3408   __ Check(greater_equal, kIndexIsNegative);
   3409 
   3410   __ push(value);
   3411   __ movq(value, FieldOperand(string, HeapObject::kMapOffset));
   3412   __ movzxbq(value, FieldOperand(value, Map::kInstanceTypeOffset));
   3413 
   3414   __ andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
   3415   __ cmpq(value, Immediate(encoding_mask));
   3416   __ Check(equal, kUnexpectedStringType);
   3417   __ pop(value);
   3418 }
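         // encoding_mask is expected to combine a representation tag with an
         // encoding tag; the callers below pass kSeqStringTag | kOneByteStringTag
         // or kSeqStringTag | kTwoByteStringTag, so the check asserts a smi index
         // in [0, length) and a sequential string of the matching width.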
   3419 
   3420 
   3421 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
   3422   ZoneList<Expression*>* args = expr->arguments();
   3423   ASSERT_EQ(3, args->length());
   3424 
   3425   Register string = rax;
   3426   Register index = rbx;
   3427   Register value = rcx;
   3428 
   3429   VisitForStackValue(args->at(1));  // index
   3430   VisitForStackValue(args->at(2));  // value
   3431   __ pop(value);
   3432   __ pop(index);
   3433   VisitForAccumulatorValue(args->at(0));  // string
   3434 
   3435   if (FLAG_debug_code) {
   3436     static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
   3437     EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
   3438   }
   3439 
   3440   __ SmiToInteger32(value, value);
   3441   __ SmiToInteger32(index, index);
   3442   __ movb(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
   3443           value);
   3444   context()->Plug(string);
   3445 }
   3446 
   3447 
   3448 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
   3449   ZoneList<Expression*>* args = expr->arguments();
   3450   ASSERT_EQ(3, args->length());
   3451 
   3452   Register string = rax;
   3453   Register index = rbx;
   3454   Register value = rcx;
   3455 
   3456   VisitForStackValue(args->at(1));  // index
   3457   VisitForStackValue(args->at(2));  // value
   3458   __ pop(value);
   3459   __ pop(index);
   3460   VisitForAccumulatorValue(args->at(0));  // string
   3461 
   3462   if (FLAG_debug_code) {
   3463     static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
   3464     EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
   3465   }
   3466 
   3467   __ SmiToInteger32(value, value);
   3468   __ SmiToInteger32(index, index);
   3469   __ movw(FieldOperand(string, index, times_2, SeqTwoByteString::kHeaderSize),
   3470           value);
   3471   context()->Plug(rax);
   3472 }
   3473 
   3474 
   3475 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
   3476   // Load the arguments on the stack and call the runtime function.
   3477   ZoneList<Expression*>* args = expr->arguments();
   3478   ASSERT(args->length() == 2);
   3479   VisitForStackValue(args->at(0));
   3480   VisitForStackValue(args->at(1));
   3481   MathPowStub stub(MathPowStub::ON_STACK);
   3482   __ CallStub(&stub);
   3483   context()->Plug(rax);
   3484 }
   3485 
   3486 
   3487 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
   3488   ZoneList<Expression*>* args = expr->arguments();
   3489   ASSERT(args->length() == 2);
   3490 
   3491   VisitForStackValue(args->at(0));  // Load the object.
   3492   VisitForAccumulatorValue(args->at(1));  // Load the value.
   3493   __ pop(rbx);  // rax = value. rbx = object.
   3494 
   3495   Label done;
   3496   // If the object is a smi, return the value.
   3497   __ JumpIfSmi(rbx, &done);
   3498 
   3499   // If the object is not a value type, return the value.
   3500   __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
   3501   __ j(not_equal, &done);
   3502 
   3503   // Store the value.
   3504   __ movq(FieldOperand(rbx, JSValue::kValueOffset), rax);
   3505   // Update the write barrier.  Save the value as it will be
   3506   // overwritten by the write barrier code and is needed afterward.
   3507   __ movq(rdx, rax);
   3508   __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);
   3509 
   3510   __ bind(&done);
   3511   context()->Plug(rax);
   3512 }
   3513 
   3514 
   3515 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
   3516   ZoneList<Expression*>* args = expr->arguments();
   3517   ASSERT_EQ(args->length(), 1);
   3518 
   3519   // Load the argument on the stack and call the stub.
   3520   VisitForStackValue(args->at(0));
   3521 
   3522   NumberToStringStub stub;
   3523   __ CallStub(&stub);
   3524   context()->Plug(rax);
   3525 }
   3526 
   3527 
   3528 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
   3529   ZoneList<Expression*>* args = expr->arguments();
   3530   ASSERT(args->length() == 1);
   3531 
   3532   VisitForAccumulatorValue(args->at(0));
   3533 
   3534   Label done;
   3535   StringCharFromCodeGenerator generator(rax, rbx);
   3536   generator.GenerateFast(masm_);
   3537   __ jmp(&done);
   3538 
   3539   NopRuntimeCallHelper call_helper;
   3540   generator.GenerateSlow(masm_, call_helper);
   3541 
   3542   __ bind(&done);
   3543   context()->Plug(rbx);
   3544 }
   3545 
   3546 
   3547 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   3548   ZoneList<Expression*>* args = expr->arguments();
   3549   ASSERT(args->length() == 2);
   3550 
   3551   VisitForStackValue(args->at(0));
   3552   VisitForAccumulatorValue(args->at(1));
   3553 
   3554   Register object = rbx;
   3555   Register index = rax;
   3556   Register result = rdx;
   3557 
   3558   __ pop(object);
   3559 
   3560   Label need_conversion;
   3561   Label index_out_of_range;
   3562   Label done;
   3563   StringCharCodeAtGenerator generator(object,
   3564                                       index,
   3565                                       result,
   3566                                       &need_conversion,
   3567                                       &need_conversion,
   3568                                       &index_out_of_range,
   3569                                       STRING_INDEX_IS_NUMBER);
   3570   generator.GenerateFast(masm_);
   3571   __ jmp(&done);
   3572 
   3573   __ bind(&index_out_of_range);
   3574   // When the index is out of range, the spec requires us to return
   3575   // NaN.
   3576   __ LoadRoot(result, Heap::kNanValueRootIndex);
   3577   __ jmp(&done);
   3578 
   3579   __ bind(&need_conversion);
   3580   // Move the undefined value into the result register, which will
   3581   // trigger conversion.
   3582   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
   3583   __ jmp(&done);
   3584 
   3585   NopRuntimeCallHelper call_helper;
   3586   generator.GenerateSlow(masm_, call_helper);
   3587 
   3588   __ bind(&done);
   3589   context()->Plug(result);
   3590 }
   3591 
   3592 
   3593 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
   3594   ZoneList<Expression*>* args = expr->arguments();
   3595   ASSERT(args->length() == 2);
   3596 
   3597   VisitForStackValue(args->at(0));
   3598   VisitForAccumulatorValue(args->at(1));
   3599 
   3600   Register object = rbx;
   3601   Register index = rax;
   3602   Register scratch = rdx;
   3603   Register result = rax;
   3604 
   3605   __ pop(object);
   3606 
   3607   Label need_conversion;
   3608   Label index_out_of_range;
   3609   Label done;
   3610   StringCharAtGenerator generator(object,
   3611                                   index,
   3612                                   scratch,
   3613                                   result,
   3614                                   &need_conversion,
   3615                                   &need_conversion,
   3616                                   &index_out_of_range,
   3617                                   STRING_INDEX_IS_NUMBER);
   3618   generator.GenerateFast(masm_);
   3619   __ jmp(&done);
   3620 
   3621   __ bind(&index_out_of_range);
   3622   // When the index is out of range, the spec requires us to return
   3623   // the empty string.
   3624   __ LoadRoot(result, Heap::kempty_stringRootIndex);
   3625   __ jmp(&done);
   3626 
   3627   __ bind(&need_conversion);
   3628   // Move smi zero into the result register, which will trigger
   3629   // conversion.
   3630   __ Move(result, Smi::FromInt(0));
   3631   __ jmp(&done);
   3632 
   3633   NopRuntimeCallHelper call_helper;
   3634   generator.GenerateSlow(masm_, call_helper);
   3635 
   3636   __ bind(&done);
   3637   context()->Plug(result);
   3638 }
   3639 
   3640 
   3641 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
   3642   ZoneList<Expression*>* args = expr->arguments();
   3643   ASSERT_EQ(2, args->length());
   3644 
   3645   VisitForStackValue(args->at(0));
   3646   VisitForStackValue(args->at(1));
   3647 
   3648   StringAddStub stub(STRING_ADD_CHECK_BOTH);
   3649   __ CallStub(&stub);
   3650   context()->Plug(rax);
   3651 }
   3652 
   3653 
   3654 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
   3655   ZoneList<Expression*>* args = expr->arguments();
   3656   ASSERT_EQ(2, args->length());
   3657 
   3658   VisitForStackValue(args->at(0));
   3659   VisitForStackValue(args->at(1));
   3660 
   3661   StringCompareStub stub;
   3662   __ CallStub(&stub);
   3663   context()->Plug(rax);
   3664 }
   3665 
   3666 
   3667 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
   3668   // Load the argument on the stack and call the stub.
   3669   TranscendentalCacheStub stub(TranscendentalCache::SIN,
   3670                                TranscendentalCacheStub::TAGGED);
   3671   ZoneList<Expression*>* args = expr->arguments();
   3672   ASSERT(args->length() == 1);
   3673   VisitForStackValue(args->at(0));
   3674   __ CallStub(&stub);
   3675   context()->Plug(rax);
   3676 }
   3677 
   3678 
   3679 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
   3680   // Load the argument on the stack and call the stub.
   3681   TranscendentalCacheStub stub(TranscendentalCache::COS,
   3682                                TranscendentalCacheStub::TAGGED);
   3683   ZoneList<Expression*>* args = expr->arguments();
   3684   ASSERT(args->length() == 1);
   3685   VisitForStackValue(args->at(0));
   3686   __ CallStub(&stub);
   3687   context()->Plug(rax);
   3688 }
   3689 
   3690 
   3691 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
   3692   // Load the argument on the stack and call the stub.
   3693   TranscendentalCacheStub stub(TranscendentalCache::TAN,
   3694                                TranscendentalCacheStub::TAGGED);
   3695   ZoneList<Expression*>* args = expr->arguments();
   3696   ASSERT(args->length() == 1);
   3697   VisitForStackValue(args->at(0));
   3698   __ CallStub(&stub);
   3699   context()->Plug(rax);
   3700 }
   3701 
   3702 
   3703 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
   3704   // Load the argument on the stack and call the stub.
   3705   TranscendentalCacheStub stub(TranscendentalCache::LOG,
   3706                                TranscendentalCacheStub::TAGGED);
   3707   ZoneList<Expression*>* args = expr->arguments();
   3708   ASSERT(args->length() == 1);
   3709   VisitForStackValue(args->at(0));
   3710   __ CallStub(&stub);
   3711   context()->Plug(rax);
   3712 }
   3713 
   3714 
   3715 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
   3716   // Load the argument on the stack and call the runtime function.
   3717   ZoneList<Expression*>* args = expr->arguments();
   3718   ASSERT(args->length() == 1);
   3719   VisitForStackValue(args->at(0));
   3720   __ CallRuntime(Runtime::kMath_sqrt, 1);
   3721   context()->Plug(rax);
   3722 }
   3723 
   3724 
   3725 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
   3726   ZoneList<Expression*>* args = expr->arguments();
   3727   ASSERT(args->length() >= 2);
   3728 
   3729   int arg_count = args->length() - 2;  // 2 ~ receiver and function.
   3730   for (int i = 0; i < arg_count + 1; i++) {
   3731     VisitForStackValue(args->at(i));
   3732   }
   3733   VisitForAccumulatorValue(args->last());  // Function.
   3734 
   3735   Label runtime, done;
   3736   // Check for non-function argument (including proxy).
   3737   __ JumpIfSmi(rax, &runtime);
   3738   __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
   3739   __ j(not_equal, &runtime);
   3740 
   3741   // InvokeFunction requires the function in rdi. Move it in there.
   3742   __ movq(rdi, result_register());
   3743   ParameterCount count(arg_count);
   3744   __ InvokeFunction(rdi, count, CALL_FUNCTION,
   3745                     NullCallWrapper(), CALL_AS_METHOD);
   3746   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   3747   __ jmp(&done);
   3748 
   3749   __ bind(&runtime);
   3750   __ push(rax);
   3751   __ CallRuntime(Runtime::kCall, args->length());
   3752   __ bind(&done);
   3753 
   3754   context()->Plug(rax);
   3755 }
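         // %_CallFunction receives the receiver, then the arguments, and finally
         // the function itself; the fast path invokes a genuine JSFunction via
         // rdi, while smis, proxies and other non-functions fall back to
         // Runtime::kCall with the same stack layout.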
   3756 
   3757 
   3758 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
   3759   RegExpConstructResultStub stub;
   3760   ZoneList<Expression*>* args = expr->arguments();
   3761   ASSERT(args->length() == 3);
   3762   VisitForStackValue(args->at(0));
   3763   VisitForStackValue(args->at(1));
   3764   VisitForStackValue(args->at(2));
   3765   __ CallStub(&stub);
   3766   context()->Plug(rax);
   3767 }
   3768 
   3769 
   3770 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
   3771   ZoneList<Expression*>* args = expr->arguments();
   3772   ASSERT_EQ(2, args->length());
   3773 
   3774   ASSERT_NE(NULL, args->at(0)->AsLiteral());
   3775   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
   3776 
   3777   Handle<FixedArray> jsfunction_result_caches(
   3778       isolate()->native_context()->jsfunction_result_caches());
   3779   if (jsfunction_result_caches->length() <= cache_id) {
   3780     __ Abort(kAttemptToUseUndefinedCache);
   3781     __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
   3782     context()->Plug(rax);
   3783     return;
   3784   }
   3785 
   3786   VisitForAccumulatorValue(args->at(1));
   3787 
   3788   Register key = rax;
   3789   Register cache = rbx;
   3790   Register tmp = rcx;
   3791   __ movq(cache, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
   3792   __ movq(cache,
   3793           FieldOperand(cache, GlobalObject::kNativeContextOffset));
   3794   __ movq(cache,
   3795           ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
   3796   __ movq(cache,
   3797           FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
   3798 
   3799   Label done, not_found;
    3800   STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
    3801   __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
    3802   // tmp now holds the finger offset as a smi.
   3803   SmiIndex index =
   3804       __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
   3805   __ cmpq(key, FieldOperand(cache,
   3806                             index.reg,
   3807                             index.scale,
   3808                             FixedArray::kHeaderSize));
   3809   __ j(not_equal, &not_found, Label::kNear);
   3810   __ movq(rax, FieldOperand(cache,
   3811                             index.reg,
   3812                             index.scale,
   3813                             FixedArray::kHeaderSize + kPointerSize));
   3814   __ jmp(&done, Label::kNear);
   3815 
   3816   __ bind(&not_found);
   3817   // Call runtime to perform the lookup.
   3818   __ push(cache);
   3819   __ push(key);
   3820   __ CallRuntime(Runtime::kGetFromCache, 2);
   3821 
   3822   __ bind(&done);
   3823   context()->Plug(rax);
   3824 }
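         // Only the cache entry under the "finger" (the most recent hit) is
         // probed inline: the key sits at the finger index and its value one
         // slot above. Any other key falls through to %GetFromCache, which does
         // the full lookup.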
   3825 
   3826 
   3827 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
   3828   ZoneList<Expression*>* args = expr->arguments();
   3829   ASSERT_EQ(2, args->length());
   3830 
   3831   Register right = rax;
   3832   Register left = rbx;
   3833   Register tmp = rcx;
   3834 
   3835   VisitForStackValue(args->at(0));
   3836   VisitForAccumulatorValue(args->at(1));
   3837   __ pop(left);
   3838 
   3839   Label done, fail, ok;
   3840   __ cmpq(left, right);
   3841   __ j(equal, &ok, Label::kNear);
   3842   // Fail if either is a non-HeapObject.
   3843   Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
   3844   __ j(either_smi, &fail, Label::kNear);
   3846   __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
   3847   __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
   3848           Immediate(JS_REGEXP_TYPE));
   3849   __ j(not_equal, &fail, Label::kNear);
   3850   __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
   3851   __ j(not_equal, &fail, Label::kNear);
   3852   __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
   3853   __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
   3854   __ j(equal, &ok, Label::kNear);
   3855   __ bind(&fail);
   3856   __ Move(rax, isolate()->factory()->false_value());
   3857   __ jmp(&done, Label::kNear);
   3858   __ bind(&ok);
   3859   __ Move(rax, isolate()->factory()->true_value());
   3860   __ bind(&done);
   3861 
   3862   context()->Plug(rax);
   3863 }
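         // Two regexps are considered equivalent here exactly when they are the
         // same object, or share both a map and a data array (hence the same
         // pattern and flags); anything else, including non-regexps, is false.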
   3864 
   3865 
   3866 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
   3867   ZoneList<Expression*>* args = expr->arguments();
   3868   ASSERT(args->length() == 1);
   3869 
   3870   VisitForAccumulatorValue(args->at(0));
   3871 
   3872   Label materialize_true, materialize_false;
   3873   Label* if_true = NULL;
   3874   Label* if_false = NULL;
   3875   Label* fall_through = NULL;
   3876   context()->PrepareTest(&materialize_true, &materialize_false,
   3877                          &if_true, &if_false, &fall_through);
   3878 
   3879   __ testl(FieldOperand(rax, String::kHashFieldOffset),
   3880            Immediate(String::kContainsCachedArrayIndexMask));
   3881   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3882   __ j(zero, if_true);
   3883   __ jmp(if_false);
   3884 
   3885   context()->Plug(if_true, if_false);
   3886 }
   3887 
   3888 
   3889 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
   3890   ZoneList<Expression*>* args = expr->arguments();
   3891   ASSERT(args->length() == 1);
   3892   VisitForAccumulatorValue(args->at(0));
   3893 
   3894   __ AssertString(rax);
   3895 
   3896   __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
   3897   ASSERT(String::kHashShift >= kSmiTagSize);
   3898   __ IndexFromHash(rax, rax);
   3899 
   3900   context()->Plug(rax);
   3901 }
   3902 
   3903 
   3904 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
   3905   Label bailout, return_result, done, one_char_separator, long_separator,
   3906       non_trivial_array, not_size_one_array, loop,
   3907       loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
   3908   ZoneList<Expression*>* args = expr->arguments();
   3909   ASSERT(args->length() == 2);
   3910   // We will leave the separator on the stack until the end of the function.
   3911   VisitForStackValue(args->at(1));
    3912   // Load the array into rax.
   3913   VisitForAccumulatorValue(args->at(0));
   3914   // All aliases of the same register have disjoint lifetimes.
   3915   Register array = rax;
   3916   Register elements = no_reg;  // Will be rax.
   3917 
   3918   Register index = rdx;
   3919 
   3920   Register string_length = rcx;
   3921 
   3922   Register string = rsi;
   3923 
   3924   Register scratch = rbx;
   3925 
   3926   Register array_length = rdi;
   3927   Register result_pos = no_reg;  // Will be rdi.
   3928 
   3929   Operand separator_operand =    Operand(rsp, 2 * kPointerSize);
   3930   Operand result_operand =       Operand(rsp, 1 * kPointerSize);
   3931   Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
   3932   // Separator operand is already pushed. Make room for the two
   3933   // other stack fields, and clear the direction flag in anticipation
   3934   // of calling CopyBytes.
   3935   __ subq(rsp, Immediate(2 * kPointerSize));
   3936   __ cld();
    3937   // Check that the array is a JSArray.
   3938   __ JumpIfSmi(array, &bailout);
   3939   __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
   3940   __ j(not_equal, &bailout);
   3941 
   3942   // Check that the array has fast elements.
   3943   __ CheckFastElements(scratch, &bailout);
   3944 
   3945   // Array has fast elements, so its length must be a smi.
   3946   // If the array has length zero, return the empty string.
   3947   __ movq(array_length, FieldOperand(array, JSArray::kLengthOffset));
   3948   __ SmiCompare(array_length, Smi::FromInt(0));
   3949   __ j(not_zero, &non_trivial_array);
   3950   __ LoadRoot(rax, Heap::kempty_stringRootIndex);
   3951   __ jmp(&return_result);
   3952 
   3953   // Save the array length on the stack.
   3954   __ bind(&non_trivial_array);
   3955   __ SmiToInteger32(array_length, array_length);
   3956   __ movl(array_length_operand, array_length);
   3957 
   3958   // Save the FixedArray containing array's elements.
   3959   // End of array's live range.
   3960   elements = array;
   3961   __ movq(elements, FieldOperand(array, JSArray::kElementsOffset));
   3962   array = no_reg;
   3963 
   3964 
   3965   // Check that all array elements are sequential ASCII strings, and
   3966   // accumulate the sum of their lengths, as a smi-encoded value.
   3967   __ Set(index, 0);
   3968   __ Set(string_length, 0);
   3969   // Loop condition: while (index < array_length).
   3970   // Live loop registers: index(int32), array_length(int32), string(String*),
   3971   //                      scratch, string_length(int32), elements(FixedArray*).
   3972   if (generate_debug_code_) {
   3973     __ cmpq(index, array_length);
   3974     __ Assert(below, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
   3975   }
   3976   __ bind(&loop);
   3977   __ movq(string, FieldOperand(elements,
   3978                                index,
   3979                                times_pointer_size,
   3980                                FixedArray::kHeaderSize));
   3981   __ JumpIfSmi(string, &bailout);
   3982   __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
   3983   __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
   3984   __ andb(scratch, Immediate(
   3985       kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
   3986   __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
   3987   __ j(not_equal, &bailout);
   3988   __ AddSmiField(string_length,
   3989                  FieldOperand(string, SeqOneByteString::kLengthOffset));
   3990   __ j(overflow, &bailout);
   3991   __ incl(index);
   3992   __ cmpl(index, array_length);
   3993   __ j(less, &loop);

  // Live registers:
  // string_length: Sum of string lengths.
  // elements: FixedArray of strings.
  // index: Array length.
  // array_length: Array length.

  // If array_length is 1, return elements[0], a string.
  __ cmpl(array_length, Immediate(1));
  __ j(not_equal, &not_size_one_array);
  __ movq(rax, FieldOperand(elements, FixedArray::kHeaderSize));
  __ jmp(&return_result);

  __ bind(&not_size_one_array);

  // End of array_length live range.
  result_pos = array_length;
  array_length = no_reg;

  // Live registers:
  // string_length: Sum of string lengths.
  // elements: FixedArray of strings.
  // index: Array length.

  // Check that the separator is a sequential ASCII string.
  __ movq(string, separator_operand);
  __ JumpIfSmi(string, &bailout);
  __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ andb(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
  __ j(not_equal, &bailout);

  // Live registers:
  // string_length: Sum of string lengths.
  // elements: FixedArray of strings.
  // index: Array length.
  // string: Separator string.

  // Add (separator length times (array_length - 1)) to string_length.
  __ SmiToInteger32(scratch,
                    FieldOperand(string, SeqOneByteString::kLengthOffset));
  __ decl(index);
  __ imull(scratch, index);
  __ j(overflow, &bailout);
  __ addl(string_length, scratch);
  __ j(overflow, &bailout);
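
  // At this point (a derived invariant, stated for clarity):
  //   string_length ==
  //       sum of len(elements[i]) + len(separator) * (array_length - 1)
  // i.e. the exact length of the joined result, with every addition and
  // multiplication checked for int32 overflow.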

  // Live registers and stack values:
  //   string_length: Total length of result string.
  //   elements: FixedArray of strings.
  __ AllocateAsciiString(result_pos, string_length, scratch,
                         index, string, &bailout);
  __ movq(result_operand, result_pos);
  __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));

  __ movq(string, separator_operand);
  __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset),
                Smi::FromInt(1));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);
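
  // Dispatch on separator length (a summary of the jumps above): equal to
  // one goes to the single-byte-store loop, greater than one to the
  // CopyBytes loop, and zero falls through to the empty separator case.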


  // Empty separator case:
  __ Set(index, 0);
  __ movl(scratch, array_length_operand);
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < array_length).
  __ bind(&loop_1);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   elements: the FixedArray of strings we are joining.
  //   scratch: array length.

  // Get string = array[index].
  __ movq(string, FieldOperand(elements, index,
                               times_pointer_size,
                               FixedArray::kHeaderSize));
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(result_pos, string, string_length);
  __ incl(index);
  __ bind(&loop_1_condition);
  __ cmpl(index, scratch);
  __ j(less, &loop_1);  // Loop while (index < array_length).
  __ jmp(&done);

  // Generic bailout code used from several places.
  __ bind(&bailout);
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ jmp(&return_result);


  // One-character separator case.
  __ bind(&one_char_separator);
  // Get the separator ASCII character value.
  // Register "string" holds the separator.
  __ movzxbl(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ Set(index, 0);
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   elements: The FixedArray of strings we are joining.
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   scratch: Separator character.

  // Copy the separator character to the result.
  __ movb(Operand(result_pos, 0), scratch);
  __ incq(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index].
  __ movq(string, FieldOperand(elements, index,
                               times_pointer_size,
                               FixedArray::kHeaderSize));
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(result_pos, string, string_length);
  __ incl(index);
  __ cmpl(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);


  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  // Make elements point to end of elements array, and index
  // count from -array_length to zero, so we don't need to maintain
  // a loop limit.
  __ movl(index, array_length_operand);
  __ lea(elements, FieldOperand(elements, index, times_pointer_size,
                                FixedArray::kHeaderSize));
  __ neg(index);
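
  // Loop shape from here on, as a sketch (not emitted code): elements now
  // points one past the last element, and index runs from -array_length up
  // to zero, so the element load is simply elements_end[index] and no
  // separate loop limit is needed:
  //   for (index = -array_length; index != 0; index++) {
  //     if (index != -array_length) copy(separator);
  //     copy(elements_end[index]);
  //   }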

  // Replace separator string with pointer to its first character, and
  // make scratch be its length.
  __ movq(string, separator_operand);
  __ SmiToInteger32(scratch,
                    FieldOperand(string, String::kLengthOffset));
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ movq(separator_operand, string);

  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   scratch: Separator length.
  //   separator_operand (rsp[0x10]): Address of first char of separator.

  // Copy the separator to the result.
  __ movq(string, separator_operand);
  __ movl(string_length, scratch);
  __ CopyBytes(result_pos, string, string_length, 2);
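  // The trailing '2' is CopyBytes' min_length hint (our reading of the
  // extra argument): on this path the separator is known to be at least
  // two characters long.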

  __ bind(&loop_3_entry);
  // Get string = array[index].
  __ movq(string, Operand(elements, index, times_pointer_size, 0));
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(result_pos, string, string_length);
  __ incq(index);
  __ j(not_equal, &loop_3);  // Loop while (index < 0).

  __ bind(&done);
  __ movq(rax, result_operand);

  __ bind(&return_result);
  // Drop temp values from the stack, and restore context register.
  __ addq(rsp, Immediate(3 * kPointerSize));
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  context()->Plug(rax);
}


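// Runtime calls appear as '%Name(...)' in the code this compiler sees.
// Names starting with an underscore, e.g. '%_IsSmi(x)' (example ours), are
// expanded inline by EmitInlineRuntimeCall; everything else pushes its
// arguments and dispatches to the JS or C++ runtime.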
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function.
    __ movq(rax, GlobalObjectOperand());
    __ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function using a call IC.
    __ Move(rcx, expr->name());
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
    Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
    CallIC(ic, mode, expr->CallRuntimeFeedbackId());
    // Restore context register.
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  } else {
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(rax);
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

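      // Three shapes reach this case (examples ours): 'delete o.x' or
      // 'delete o[k]' (property), 'delete x' (variable), and 'delete f()'
      // (anything else, which evaluates the operand and yields true).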
      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
            ? kNonStrictMode : kStrictMode;
        __ Push(Smi::FromInt(strict_mode_flag));
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(rax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
        if (var->IsUnallocated()) {
          __ push(GlobalObjectOperand());
          __ Push(var->name());
          __ Push(Smi::FromInt(kNonStrictMode));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(rax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(rax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kTrueValueRootIndex);
        } else {
          __ PushRoot(Heap::kTrueValueRootIndex);
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kFalseValueRootIndex);
        } else {
          __ PushRoot(Heap::kFalseValueRootIndex);
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(rax);
      break;
    }

    default:
      UNREACHABLE();
  }
}


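// Handles prefix and postfix count operations: ++x, x++, --x, x--
// (examples ours). The LhsKind cases below correspond to 'x++' (VARIABLE),
// 'o.x++' (NAMED_PROPERTY), and 'o[k]++' (KEYED_PROPERTY).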
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand-sides are rewritten to have a 'throw
  // ReferenceError' as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ Push(Smi::FromInt(0));
    }
    if (assign_type == NAMED_PROPERTY) {
      VisitForAccumulatorValue(prop->obj());
      __ push(rax);  // Copy of receiver, needed for later store.
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ movq(rdx, Operand(rsp, 0));  // Leave receiver on stack.
      __ push(rax);  // Copy of key, needed for later store.
      EmitKeyedPropertyLoad(prop);
    }
  }
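
  // Stack layout at this point for a postfix operation in a non-effect
  // context (a derived summary; top of stack on the right, the loaded
  // value itself is in rax):
  //   VARIABLE:        no extra slots
  //   NAMED_PROPERTY:  [reserved result slot, receiver]
  //   KEYED_PROPERTY:  [reserved result slot, receiver, key]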

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  if (ShouldInlineSmiCase(expr->op())) {
    __ JumpIfSmi(rax, &no_conversion, Label::kNear);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);
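
  // e.g. for 'x++' with x holding the string "5" (example ours), the
  // ToNumberStub converts the operand to the number 5 before the
  // increment; smi operands skip the conversion entirely.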

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(rax);
          break;
        case NAMED_PROPERTY:
          __ movq(Operand(rsp, kPointerSize), rax);
          break;
        case KEYED_PROPERTY:
          __ movq(Operand(rsp, 2 * kPointerSize), rax);
          break;
      }
    }
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);

  if (ShouldInlineSmiCase(expr->op())) {
    if (expr->op() == Token::INC) {
      __ SmiAddConstant(rax, rax, Smi::FromInt(1));
    } else {
      __ SmiSubConstant(rax, rax, Smi::FromInt(1));
    }
    __ j(overflow, &stub_call, Label::kNear);
    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(rax, &done, Label::kNear);

    __ bind(&stub_call);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ SmiSubConstant(rax, rax, Smi::FromInt(1));
    } else {
      __ SmiAddConstant(rax, rax, Smi::FromInt(1));
    }
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.
  __ movq(rdx, rax);
  __ Move(rax, Smi::FromInt(1));
  BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
  CallIC(stub.GetCode(isolate()),
         RelocInfo::CODE_TARGET,
         expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in rax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(rax);
        }
        // For all contexts except kEffect: We have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(rax);
      }
      break;
    case NAMED_PROPERTY: {
      __ Move(rcx, prop->key()->AsLiteral()->value());
      __ pop(rdx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(rcx);
      __ pop(rdx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

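  // 'typeof x' must not throw a ReferenceError for an undeclared x
  // (example ours), so both lookup paths below use loads that cannot
  // raise one.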
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ Move(rcx, proxy->name());
    __ movq(rax, GlobalObjectOperand());
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallIC(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(rax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(rsi);
    __ Push(proxy->name());
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(rax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

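  // Each branch below specializes 'typeof <sub_expr> == "<check>"' for one
  // literal string, e.g. 'typeof x == "number"' (example ours), into
  // direct map and instance type tests on the value in rax.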
  if (check->Equals(isolate()->heap()->number_string())) {
    __ JumpIfSmi(rax, if_true);
    __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
    __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_string())) {
    __ JumpIfSmi(rax, if_false);
    // Check for undetectable objects => false.
    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
    __ j(above_equal, if_false);
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->symbol_string())) {
    __ JumpIfSmi(rax, if_false);
    __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_string())) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(equal, if_true);
    __ CompareRoot(rax, Heap::kFalseValueRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_string())) {
    __ CompareRoot(rax, Heap::kNullValueRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_string())) {
    __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
    __ j(equal, if_true);
    __ JumpIfSmi(rax, if_false);
    // Check for undetectable objects => true.
    __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_string())) {
    __ JumpIfSmi(rax, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
    __ j(equal, if_true);
    __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_string())) {
    __ JumpIfSmi(rax, if_false);
    if (!FLAG_harmony_typeof) {
      __ CompareRoot(rax, Heap::kNullValueRootIndex);
      __ j(equal, if_true);
    }
    __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
    __ j(below, if_false);
    __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, if_false);
    // Check for undetectable objects => false.
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ testq(rax, rax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ pop(rdx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ movq(rcx, rdx);
        __ or_(rcx, rax);
        patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
        __ cmpq(rdx, rax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }
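
      // In the inline fast case above, or-ing the operands lets one smi
      // tag test cover both: the combined value has its tag bit clear
      // only if rdx and rax are both smis.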

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ testq(rax, rax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
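  // 'x === null' / 'x === undefined' reduce to one root comparison below,
  // while sloppy 'x == null' must also accept undefined and undetectable
  // objects, so it goes through the CompareNilIC (examples ours).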
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(rax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
    __ testq(rax, rax);
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(rax);
}


Register FullCodeGenerator::result_register() {
  return rax;
}


Register FullCodeGenerator::context_register() {
  return rsi;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT(IsAligned(frame_offset, kPointerSize));
  __ movq(Operand(rbp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ movq(dst, ContextOperand(rsi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ Push(Smi::FromInt(0));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ push(ContextOperand(rsi, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.


void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(rdx));
  ASSERT(!result_register().is(rcx));
  // Cook return address on top of stack (smi encoded Code* delta).
  __ PopReturnAddressTo(rdx);
  __ Move(rcx, masm_->CodeObject());
  __ subq(rdx, rcx);
  __ Integer32ToSmi(rdx, rdx);
  __ push(rdx);
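
  // The slot just pushed holds SmiTag(return_address - code_object_start).
  // Storing a delta rather than a raw address keeps the value valid if the
  // code object is moved by a GC while the finally block runs (our reading
  // of the 'cooking' above); ExitFinallyBlock adds the base back.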

  // Store result register while executing finally block.
  __ push(result_register());

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Load(rdx, pending_message_obj);
  __ push(rdx);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ Load(rdx, has_pending_message);
  __ Integer32ToSmi(rdx, rdx);
  __ push(rdx);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Load(rdx, pending_message_script);
  __ push(rdx);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(rdx));
  ASSERT(!result_register().is(rcx));
  // Restore pending message from stack.
  __ pop(rdx);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Store(pending_message_script, rdx);

  __ pop(rdx);
  __ SmiToInteger32(rdx, rdx);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ Store(has_pending_message, rdx);

  __ pop(rdx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Store(pending_message_obj, rdx);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook return address.
  __ pop(rdx);
  __ SmiToInteger32(rdx, rdx);
  __ Move(rcx, masm_->CodeObject());
  __ addq(rdx, rcx);
  __ jmp(rdx);
}


#undef __

#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ movq(rsi, Operand(rsp, StackHandlerConstants::kContextOffset));
    __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  }
  __ PopTryHandler();
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64