// full-codegen-ia32.cc — V8 full code generator, IA32 backend.
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 
     28 #include "v8.h"
     29 
     30 #if V8_TARGET_ARCH_IA32
     31 
     32 #include "code-stubs.h"
     33 #include "codegen.h"
     34 #include "compiler.h"
     35 #include "debug.h"
     36 #include "full-codegen.h"
     37 #include "isolate-inl.h"
     38 #include "parser.h"
     39 #include "scopes.h"
     40 #include "stub-cache.h"
     41 
     42 namespace v8 {
     43 namespace internal {
     44 
     45 #define __ ACCESS_MASM(masm_)
     46 
     47 
// Records the location of a jc/jnc jump emitted for an inlined smi check
// so that the IC machinery can later patch it (per the comment below:
// jc becomes jz, jnc becomes jnz).  EmitPatchInfo() emits a dummy
// 'test eax, <imm8>' whose immediate encodes the distance back to the
// patch site, so the patcher can locate the jump from the call site.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    // If a jump was recorded, its patch info must have been emitted too.
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // Emits 'test reg, kSmiTagMask' followed by a patchable jump.  Since
  // 'test' always clears the carry flag, the jnc below is always taken
  // until the jump is patched.
  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  // As above, but on the carry flag: the jc is never taken until patched.
  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  // Emits the patch-site marker: a 'test eax, <delta>' whose 8-bit
  // immediate is the code size generated since the patch site.  If no
  // jump was emitted, a single nop signals "no inlined code" instead.
  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      ASSERT(is_int8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    ASSERT(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm_;
  Label patch_site_;   // Position of the patchable jump, once bound.
#ifdef DEBUG
  bool info_emitted_;  // Whether EmitPatchInfo() ran for a bound site.
#endif
};
    102 
    103 
    104 // Generate code for a JS function.  On entry to the function the receiver
    105 // and arguments have been pushed on the stack left to right, with the
    106 // return address on top of them.  The actual argument count matches the
    107 // formal parameter count expected by the function.
    108 //
    109 // The live registers are:
    110 //   o ecx: CallKind
    111 //   o edi: the JS function object being called (i.e. ourselves)
    112 //   o esi: our context
    113 //   o ebp: our caller's frame pointer
    114 //   o esp: stack pointer (pointing to return address)
    115 //
    116 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
    117 // frames-ia32.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  // Profiling counter cell: starts at the interrupt budget and is
  // decremented on back edges and at returns (see
  // EmitProfilingCounterDecrement / EmitBackEdgeBookkeeping).
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  // Support --stop-at=<function name>: break into the debugger on entry.
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). ecx is zero for method calls and non-zero for
  // function calls.
  if (!info->is_classic_mode() || info->is_native()) {
    Label ok;
    __ test(ecx, ecx);
    __ j(zero, &ok, Label::kNear);
    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ JumpIfSmi(ecx, &ok);
    // Only a global-proxy receiver is replaced with undefined.
    __ CmpObjectType(ecx, JS_GLOBAL_PROXY_TYPE, ecx);
    __ j(not_equal, &ok, Label::kNear);
    __ mov(Operand(esp, receiver_offset),
           Immediate(isolate()->factory()->undefined_value()));
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      // Load undefined once into eax and push it repeatedly — smaller
      // code than pushing the immediate in each iteration.
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      for (int i = 0; i < locals_count; i++) {
        __ push(eax);
      }
    }
  }

  // Tracks whether edi still holds the function (calls below clobber it).
  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in edi.
    __ push(edi);
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      // Small contexts are allocated by a stub on the fast path.
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in both eax and esi.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in esi.
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        __ RecordWriteContextSlot(esi,
                                  context_offset,
                                  eax,
                                  ebx,
                                  kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(edi);
    } else {
      // edi was clobbered by context allocation; reload from the frame.
      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ lea(edx,
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(edx);
    __ push(Immediate(Smi::FromInt(num_parameters)));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    // Store the new arguments object into its variable slot.
    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_HARMONY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      ExternalReference stack_limit =
          ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);
      StackCheckStub stub;
      __ CallStub(&stub);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}
    310 
    311 
// Resets the accumulator register (eax) to the smi zero.
void FullCodeGenerator::ClearAccumulator() {
  __ Set(eax, Immediate(Smi::FromInt(0)));
}
    315 
    316 
// Emits code that subtracts |delta| (as a smi) from the profiling counter
// cell.  Clobbers ebx; leaves the flags from the subtraction set so the
// caller can branch on the sign of the new counter value.
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}
    322 
    323 
// Emits code that reloads the profiling counter cell with the interrupt
// budget.  Clobbers ebx.
void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
    // Self-optimization is a one-off thing: if it fails, don't try again.
    // Smi::kMaxValue effectively disables further counter underflow.
    reset_value = Smi::kMaxValue;
  }
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}
    334 
    335 
// Emits the per-back-edge profiling check for a loop: decrements the
// profiling counter by a weight derived from the back edge's code
// distance and, on underflow, calls the InterruptStub (which may trigger
// OSR / optimization).  Also records bailout points for the loop entry.
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  int weight = 1;
  if (FLAG_weighted_back_edges) {
    ASSERT(back_edge_target->is_bound());
    // Larger loop bodies decrement the counter faster.
    int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
    weight = Min(kMaxBackEdgeWeight,
                 Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  // Counter still positive: skip the interrupt.
  __ j(positive, &ok, Label::kNear);
  InterruptStub stub;
  __ CallStub(&stub);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
    367 
    368 
// Emits the function's (single, shared) return sequence.  The first call
// binds return_label_ and emits the full sequence; later calls just jump
// to it.  The return value is expected in eax.  The epilogue's exact code
// size matters: the debugger patches it (see the kJSReturnSequenceLength
// assert below), which is why 'leave' is not used.
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    // Sequence already emitted; reuse it.
    __ jmp(&return_label_);
  } else {
    // Common return label
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      // Pretend that the exit is a backwards jump to the entry.
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        // Weight by the function's total code size, like a back edge.
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kCodeSizeMultiplier));
      }
      EmitProfilingCounterDecrement(weight);
      Label ok;
      __ j(positive, &ok, Label::kNear);
      // Counter underflowed: preserve the return value across the call.
      __ push(eax);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      } else {
        InterruptStub stub;
        __ CallStub(&stub);
      }
      __ pop(eax);
      EmitProfilingCounterReset();
      __ bind(&ok);
    }
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    SetSourcePosition(function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ mov(esp, ebp);
    int no_frame_start = masm_->pc_offset();
    __ pop(ebp);

    // Ret pops the return address plus receiver and parameters.
    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, ecx);
#ifdef ENABLE_DEBUGGER_SUPPORT
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}
    429 
    430 
// A variable reference in effect context produces no value: emit nothing.
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}
    434 
    435 
// Loads the variable's value into the accumulator (eax).
void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}
    440 
    441 
// Pushes the variable's value onto the expression stack.
void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  __ push(operand);
}
    448 
    449 
// Evaluates the variable as a boolean test: loads it into the accumulator
// and branches via DoTest.
void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
    456 
    457 
// Root-index plugs are unused on this port (no root register on IA32).
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
    461 
    462 
// Root-index plugs are unused on this port (no root register on IA32).
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
    467 
    468 
// Root-index plugs are unused on this port (no root register on IA32).
void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
    473 
    474 
// Root-index plugs are unused on this port (no root register on IA32).
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
    478 
    479 
// A literal in effect context produces no value: emit nothing.
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}
    482 
    483 
// Materializes a literal into the accumulator.  SafeSet is used for smis
// — presumably to avoid embedding attacker-chosen immediates verbatim in
// generated code; TODO confirm against MacroAssembler::SafeSet.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeSet(result_register(), Immediate(lit));
  } else {
    __ Set(result_register(), Immediate(lit));
  }
}
    492 
    493 
// Pushes a literal onto the expression stack.  SafePush mirrors SafeSet
// above for smi immediates — TODO confirm semantics in MacroAssembler.
void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}
    501 
    502 
// Evaluates a literal in test context.  The truthiness of most literal
// kinds is known at compile time, so a direct jump to the true/false
// label is emitted; only otherwise does it fall back to a runtime test of
// the accumulator.
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    // Statically falsy values.
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    // Statically truthy values (objects are always truthy).
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    // Only the empty string is falsy.
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    // Only the smi zero is falsy.
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}
    531 
    532 
// Drops |count| stack values; in effect context |reg| is discarded.
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}
    538 
    539 
// Drops |count| stack values and moves |reg| into the accumulator.
void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}
    547 
    548 
// Replaces the top |count| stack values with |reg|: drop all but one
// slot, then overwrite the remaining top-of-stack slot in place.
void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);
}
    555 
    556 
// Drops |count| stack values, moves |reg| into the accumulator, and
// branches on its boolean value.
void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
    566 
    567 
// In effect context the two outcomes are indistinguishable; bind the
// single shared label and emit no value.
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}
    573 
    574 
// Materializes a boolean outcome into the accumulator: the true label
// loads true_value, the false label loads false_value.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}
    586 
    587 
// Materializes a boolean outcome onto the expression stack: the true
// label pushes true_value, the false label pushes false_value.
void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}
    599 
    600 
// In test context the materialization labels must already be the test's
// own branch targets, so there is nothing to emit.
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}
    606 
    607 
// A constant boolean in effect context produces no value: emit nothing.
void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}
    610 
    611 
// Loads the heap boolean corresponding to |flag| into the accumulator.
void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}
    618 
    619 
// Pushes the heap boolean corresponding to |flag| onto the stack.
void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}
    626 
    627 
// A constant boolean in test context: jump straight to the matching
// branch target (unless it is the fall-through).
void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}
    639 
    640 
// Emits a boolean test of the accumulator via the ToBoolean IC and splits
// control flow to |if_true| / |if_false| (with |fall_through| elided).
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, RelocInfo::CODE_TARGET, condition->test_id());
  __ test(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}
    651 
    652 
// Emits a conditional branch on |cc| to |if_true| / |if_false|, omitting
// the jump to whichever target is the fall-through (at most one of the
// two can be).  When neither falls through, both a conditional and an
// unconditional jump are needed.
void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    // Invert the condition so the false case is the taken branch.
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}
    666 
    667 
    668 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
    669   ASSERT(var->IsStackAllocated());
    670   // Offset is negative because higher indexes are at lower addresses.
    671   int offset = -var->index() * kPointerSize;
    672   // Adjust by a (parameter or local) base offset.
    673   if (var->IsParameter()) {
    674     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
    675   } else {
    676     offset += JavaScriptFrameConstants::kLocal0Offset;
    677   }
    678   return Operand(ebp, offset);
    679 }
    680 
    681 
// Returns an operand addressing |var|'s slot.  For context slots this
// emits code to walk the context chain into |scratch|; for stack slots no
// code is emitted and |scratch| is untouched.
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}
    692 
    693 
// Loads |var|'s value into |dest|.  |dest| doubles as the scratch
// register for the context walk, so it may be clobbered before the load.
void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}
    699 
    700 
// Stores |src| into |var|'s slot.  |scratch0| is used for the context
// walk; both scratches are clobbered by the write barrier for context
// slots.  All three registers must be distinct (and none may be esi,
// which holds the current context).
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}
    719 
    720 
// Records a bailout point for |expr| before control flow is split in a
// test context.  When |should_normalize| is set, the emitted deopt path
// normalizes the accumulator by comparing it against true_value and
// re-splitting to |if_true|/|if_false|; the &skip jump keeps that path
// out of normal execution.
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}
    739 
    740 
// Debug-mode sanity check that the current context (esi) is a declaration
// context: declarations must never target a with- or catch-context.
// Emits code only when --debug-code is enabled; clobbers ebx.
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}
    753 
    754 
// Emits code (or records data) to declare a single variable, depending
// on where the variable was allocated:
//  - UNALLOCATED (global): append a (name, initial value) pair to
//    globals_, declared later in one batch by DeclareGlobals().
//  - PARAMETER/LOCAL: hole-initialize the stack slot if required.
//  - CONTEXT: hole-initialize the context slot if required.
//  - LOOKUP: call the runtime to declare the slot dynamically.
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  // let and const (both legacy and harmony) start out holding the hole
  // so that use-before-initialization can be detected.
  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(), zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      // Arguments for Runtime::kDeclareContextSlot:
      // context, name, attributes, initial value.
      __ push(esi);
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ push(Immediate(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}
    815 
    816 
// Emits code to declare a function binding and assign it its closure.
// Unlike plain variables, function declarations always have an initial
// value (the function itself), so every case stores a value.
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      // Globals are declared in a batch: record (name, SharedFunctionInfo)
      // in globals_ for DeclareGlobals().
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      // Evaluate the function literal into eax and store it in the slot.
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi,
                                Context::SlotOffset(variable->index()),
                                result_register(),
                                ecx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      // Arguments for Runtime::kDeclareContextSlot:
      // context, name, attributes, value (pushed by the visit below).
      __ push(esi);
      __ push(Immediate(variable->name()));
      __ push(Immediate(Smi::FromInt(NONE)));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}
    868 
    869 
// Emits code to declare a module: loads the module's instance object
// (found via the interface's index in the global context), stores it in
// the module's context slot (with write barrier), then compiles the
// module body.  Module bindings are always context-allocated.
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(eax, scope_->ContextChainLength(scope_->GlobalScope()));
  __ mov(eax, ContextOperand(eax, variable->interface()->Index()));
  __ mov(eax, ContextOperand(eax, Context::EXTENSION_INDEX));

  // Assign it.
  __ mov(ContextOperand(esi, variable->index()), eax);
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(esi,
                            Context::SlotOffset(variable->index()),
                            eax,
                            ecx,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}
    898 
    899 
// Emits code for an import declaration.  Imports are not fully
// implemented yet (see TODOs); currently only the debug declaration
// check is emitted for context-allocated imports, and nothing for
// globals.  Other allocation kinds cannot occur for imports.
void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}
    921 
    922 
// Emits code for an export declaration.  Not implemented yet; exports
// currently generate no code (see TODO).
void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}
    926 
    927 
// Declares all global variables and functions recorded in |pairs| (the
// flattened (name, value) pairs accumulated in globals_) with a single
// runtime call.
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(esi);  // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}
    936 
    937 
// Declares all modules described by |descriptions| with a single
// runtime call.
void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}
    944 
    945 
// Compiles a switch statement as a chain of '===' comparisons against
// the tag value kept on the stack, followed by all the case bodies.
// Bodies are emitted contiguously after the tests so that fall-through
// between cases works naturally.
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      // Inline fast case: if both operands are smis, compare them
      // directly and skip the compare IC.
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);  // ecx is a smi only if both edx and eax are.
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();
    // The compare IC leaves zero in eax on a strict-equality match.
    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
   1027 
   1028 
// Compiles a for-in loop.  The loop state lives in five stack slots,
// from deepest to shallowest: the enumerable object, the expected map
// (or a smi marker in the slow/proxy case), the enum cache or fixed
// array of names, the length, and the current index (all smis where
// applicable).  That layout matches the 5 * kPointerSize cleanup at
// break_label below.
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(eax);  // The enumerable: bottom-most of the five loop slots.

  // Check for proxies.
  Label call_runtime, use_cache, fixed_array;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  // The runtime returns either a map (use the enum cache below) or a
  // fixed array of property names (iterate it directly).
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  // Nothing to enumerate: drop the pushed enumerable and exit.
  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  // The feedback cell starts out marked fast-case; the code below
  // overwrites it with the slow-case marker whenever this fixed-array
  // path actually executes, informing the optimizing compiler.
  Handle<Cell> cell = isolate()->factory()->NewCell(
      Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
                     isolate()));
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
  __ LoadHeapObject(ebx, cell);
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));

  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check
  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
  __ j(above, &non_proxy);
  __ mov(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ push(ebx);  // Smi
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  // Stack layout here (from esp): index, length, array/cache,
  // map-or-smi, enumerable.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  ASSERT(Smi::FromInt(0) == 0);
  __ test(edx, edx);
  __ j(zero, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ test(eax, eax);
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
   1197 
   1198 
// Compiles a for-of loop over an iterator.  The iterator object is
// obtained from the iterable, converted to a JS object, and then the
// loop repeatedly calls next(), tests result.done, and assigns
// result.value to the loop variable before running the body.
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[@@iterator]()
  VisitForAccumulatorValue(stmt->assign_iterator());

  // As with for-in, skip the loop if the iterator is null or undefined.
  __ CompareRoot(eax, Heap::kUndefinedValueRootIndex);
  __ j(equal, loop_statement.break_label());
  __ CompareRoot(eax, Heap::kNullValueRootIndex);
  __ j(equal, loop_statement.break_label());

  // Convert the iterator to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
   1255 
   1256 
   1257 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
   1258                                        bool pretenure) {
   1259   // Use the fast case closure allocation code that allocates in new
   1260   // space for nested functions that don't need literals cloning. If
   1261   // we're running with the --always-opt or the --prepare-always-opt
   1262   // flag, we need to use the runtime function so that the new function
   1263   // we are creating here gets a chance to have its code optimized and
   1264   // doesn't just get a copy of the existing unoptimized code.
   1265   if (!FLAG_always_opt &&
   1266       !FLAG_prepare_always_opt &&
   1267       !pretenure &&
   1268       scope()->is_function_scope() &&
   1269       info->num_literals() == 0) {
   1270     FastNewClosureStub stub(info->language_mode(), info->is_generator());
   1271     __ push(Immediate(info));
   1272     __ CallStub(&stub);
   1273   } else {
   1274     __ push(esi);
   1275     __ push(Immediate(info));
   1276     __ push(Immediate(pretenure
   1277                       ? isolate()->factory()->true_value()
   1278                       : isolate()->factory()->false_value()));
   1279     __ CallRuntime(Runtime::kNewClosure, 3);
   1280   }
   1281   context()->Plug(eax);
   1282 }
   1283 
   1284 
// Compiles a variable reference by delegating to EmitVariableLoad,
// which plugs the loaded value into the current expression context.
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}
   1289 
   1290 
// Emits the context-extension checks required before |var| can be
// loaded with a plain global load IC: any scope between here and the
// global scope that calls a non-strict eval may have introduced an
// extension object shadowing the global.  Jumps to |slow| if an
// extension object is found; otherwise performs the IC load (the
// RelocInfo mode distinguishes loads occurring inside typeof).
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Inside an eval scope the remaining context chain length is not
    // statically known, so emit a runtime loop over the chain.
    // Loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ mov(edx, GlobalObjectOperand());
  __ mov(ecx, var->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  CallIC(ic, mode);
}
   1349 
   1350 
   1351 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
   1352                                                                 Label* slow) {
   1353   ASSERT(var->IsContextSlot());
   1354   Register context = esi;
   1355   Register temp = ebx;
   1356 
   1357   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
   1358     if (s->num_heap_slots() > 0) {
   1359       if (s->calls_non_strict_eval()) {
   1360         // Check that extension is NULL.
   1361         __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
   1362                Immediate(0));
   1363         __ j(not_equal, slow);
   1364       }
   1365       __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
   1366       // Walk the rest of the chain without clobbering esi.
   1367       context = temp;
   1368     }
   1369   }
   1370   // Check that last extension is NULL.
   1371   __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
   1372   __ j(not_equal, slow);
   1373 
   1374   // This function is used only for loads, not stores, so it's safe to
   1375   // return an esi-based operand (the write barrier cannot be allowed to
   1376   // destroy the esi register).
   1377   return ContextOperand(context, var->index());
   1378 }
   1379 
   1380 
   1381 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
   1382                                                   TypeofState typeof_state,
   1383                                                   Label* slow,
   1384                                                   Label* done) {
   1385   // Generate fast-case code for variables that might be shadowed by
   1386   // eval-introduced variables.  Eval is used a lot without
   1387   // introducing variables.  In those cases, we do not want to
   1388   // perform a runtime call for all variables in the scope
   1389   // containing the eval.
   1390   if (var->mode() == DYNAMIC_GLOBAL) {
   1391     EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
   1392     __ jmp(done);
   1393   } else if (var->mode() == DYNAMIC_LOCAL) {
   1394     Variable* local = var->local_if_not_shadowed();
   1395     __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
   1396     if (local->mode() == LET ||
   1397         local->mode() == CONST ||
   1398         local->mode() == CONST_HARMONY) {
   1399       __ cmp(eax, isolate()->factory()->the_hole_value());
   1400       __ j(not_equal, done);
   1401       if (local->mode() == CONST) {
   1402         __ mov(eax, isolate()->factory()->undefined_value());
   1403       } else {  // LET || CONST_HARMONY
   1404         __ push(Immediate(var->name()));
   1405         __ CallRuntime(Runtime::kThrowReferenceError, 1);
   1406       }
   1407     }
   1408     __ jmp(done);
   1409   }
   1410 }
   1411 
   1412 
// Loads the value of the variable referenced by |proxy| and plugs it
// into the current expression context.  Globals go through a load IC;
// stack- and context-allocated variables are loaded directly, with a
// read barrier (hole check) for let/const bindings where use before
// initialization is possible; dynamic variables take a fast-case
// lookup with a runtime fallback.
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in ecx and the global
      // object in eax.
      __ mov(edx, GlobalObjectOperand());
      __ mov(ecx, var->name());
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(eax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(eax, var);
          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ push(Immediate(var->name()));
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitalized const bindings outside of harmony mode are unholed.
            ASSERT(var->mode() == CONST);
            __ mov(eax, isolate()->factory()->undefined_value());
          }
          __ bind(&done);
          context()->Plug(eax);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}
   1515 
   1516 
// Materializes a regexp literal.  The boilerplate regexp is cached in the
// function's literals array; if the slot is still undefined the boilerplate
// is created by a runtime call.  The boilerplate is then cloned into a
// freshly allocated JSRegExp object, which is the expression's value.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  // An undefined literal slot means the boilerplate has not been created yet.
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  // Keep the boilerplate in ebx for the clone step below.
  __ mov(ebx, eax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  // Try inline allocation first; fall back to the runtime on failure.
  __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  // Preserve the boilerplate (ebx) across the runtime call.
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  // Copy the trailing word when the object size is an odd number of words.
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
  context()->Plug(eax);
}
   1569 
   1570 
   1571 void FullCodeGenerator::EmitAccessor(Expression* expression) {
   1572   if (expression == NULL) {
   1573     __ push(Immediate(isolate()->factory()->null_value()));
   1574   } else {
   1575     VisitForStackValue(expression);
   1576   }
   1577 }
   1578 
   1579 
// Emits code that creates an object literal and then stores each
// non-compile-time property into it.  The boilerplate clone is done either
// via a runtime call (deep literals, double-storing literals, non-fast or
// large literals, or when serializing) or via FastCloneShallowObjectStub.
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  // constant_properties holds key/value pairs, hence the division by two.
  int properties_count = constant_properties->length() / 2;
  if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
      expr->depth() > 1) {
    // Deep or double-storing literal: full runtime creation.
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    // Shallow literal that the stub cannot handle: runtime shallow clone.
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  } else {
    // Fast case: clone via stub.  Arguments are passed in registers.
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    // Compile-time values are already part of the cloned boilerplate.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        // Constants were filtered out by IsCompileTimeValue() above.
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          // Internalized-string key: store via the (named) store IC.
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(ecx, Immediate(key->value()));
            __ mov(edx, Operand(esp, 0));
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            // Shadowed by a later duplicate key; evaluate for effect only.
            VisitForEffect(value);
          }
          break;
        }
        // Non-string key: go through Runtime::kSetProperty.
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ push(Immediate(Smi::FromInt(NONE)));  // PropertyAttributes
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        // Accessors are collected first so getter/setter pairs sharing a key
        // can be defined with a single runtime call below.
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ push(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ push(Immediate(Smi::FromInt(NONE)));
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
   1711 
   1712 
// Emits code that creates an array literal and then evaluates and stores the
// non-constant subexpressions into the clone.  The clone itself is created
// by FastCloneShallowArrayStub when possible, otherwise by a runtime call.
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  // constant_elements is a pair: [elements kind (Smi), elements array].
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  Heap* heap = isolate()->heap();
  if (has_constant_fast_elements &&
      constant_elements_values->map() == heap->fixed_cow_array_map()) {
    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
    // change, so it's possible to specialize the stub in advance.
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
        DONT_TRACK_ALLOCATION_SITE,
        length);
    __ CallStub(&stub);
  } else if (expr->depth() > 1) {
    // Nested literals require full runtime creation.
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (Serializer::enabled() ||
      length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    // Too long for the stub (or serializing): runtime shallow clone.
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
    AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
        ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;

    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
    // change, so it's possible to specialize the stub in advance.
    if (has_constant_fast_elements) {
      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
      allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
    }

    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      // Keep both the literal and its index on the stack while the
      // subexpressions are evaluated (the stub path below needs them).
      __ push(eax);  // array literal.
      __ push(Immediate(Smi::FromInt(expr->literal_index())));
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
      // cannot transition and don't need to call the runtime stub.
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ mov(ebx, Operand(esp, kPointerSize));  // Copy of array literal.
      __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
      // Store the subexpression value in the array's elements.
      __ mov(FieldOperand(ebx, offset), result_register());
      // Update the write barrier for the array store.
      __ RecordWriteField(ebx, offset, result_register(), ecx,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          INLINE_SMI_CHECK);
    } else {
      // Store the subexpression value in the array's elements.
      __ mov(ecx, Immediate(Smi::FromInt(i)));
      StoreArrayLiteralElementStub stub;
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ add(esp, Immediate(kPointerSize));  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
   1825 
   1826 
// Emits code for an assignment expression, including compound assignments
// (e.g. "a.b += x").  The left-hand side is classified as a variable, named
// property, or keyed property; compound assignments additionally load the
// old value and apply the binary operation before the store.
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in edx.
        VisitForStackValue(property->obj());
        __ mov(edx, Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        // Receiver and key are needed on the stack (for the later store)
        // and in edx/ecx (for the keyed load IC).
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(edx, Operand(esp, kPointerSize));  // Object.
        __ mov(ecx, Operand(esp, 0));             // Key.
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    // The right operand may be overwritten if it allows it; the left operand
    // is needed for a potential bailout and must not be clobbered.
    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
   1939 
   1940 
// Emits code for a yield expression.  SUSPEND/INITIAL suspend the generator
// and return a boxed {value, done} result; FINAL closes the generator and
// returns; DELEGATING implements "yield*" with an inline iteration loop over
// the delegate iterator.
void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this.  It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::SUSPEND:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::INITIAL: {
      Label suspend, continuation, post_runtime, resume;

      __ jmp(&suspend);

      // The continuation label marks the offset at which execution resumes;
      // its position is stored in the generator object below.
      __ bind(&continuation);
      __ jmp(&resume);

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(continuation.pos())));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      // If the operand stack is empty (esp equals the expression-slot base),
      // no runtime call is needed to save the operand stack.
      __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
      __ cmp(esp, ebx);
      __ j(equal, &post_runtime);
      __ push(eax);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ mov(context_register(),
             Operand(ebp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);
      __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::FINAL: {
      // Mark the generator closed so it cannot be resumed again.
      VisitForAccumulatorValue(expr->generator_object());
      __ mov(FieldOperand(result_register(),
                          JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::DELEGATING: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      // Initial send value is undefined.
      __ mov(eax, isolate()->factory()->undefined_value());
      __ jmp(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      // Register this position as the handler for the surrounding try scope.
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ mov(ecx, isolate()->factory()->throw_string());  // "throw"
      __ push(ecx);                                      // "throw"
      __ push(Operand(esp, 2 * kPointerSize));           // iter
      __ push(eax);                                      // exception
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(eax);                                       // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ push(eax);                                      // result
      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ jmp(&l_resume);
      __ bind(&l_suspend);
      // The generator object sits below the try handler and the result slot.
      const int generator_object_depth = kPointerSize + handler_size;
      __ mov(eax, Operand(esp, generator_object_depth));
      __ push(eax);                                      // g
      ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(l_continuation.pos())));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ mov(context_register(),
             Operand(ebp, StandardFrameConstants::kContextOffset));
      __ pop(eax);                                       // result
      EmitReturnSequence();
      __ bind(&l_resume);                                // received in eax
      __ PopTryHandler();

      // receiver = iter; f = iter.next; arg = received;
      __ bind(&l_next);
      __ mov(ecx, isolate()->factory()->next_string());  // "next"
      __ push(ecx);
      __ push(Operand(esp, 2 * kPointerSize));           // iter
      __ push(eax);                                      // received

      // result = receiver[f](arg);
      __ bind(&l_call);
      Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(1);
      CallIC(ic);
      __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The key is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ bind(&l_loop);
      __ push(eax);                                      // save result
      __ mov(edx, eax);                                  // result
      __ mov(ecx, isolate()->factory()->done_string());  // "done"
      Handle<Code> done_ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(done_ic);                                   // result.done in eax
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ test(eax, eax);
      __ j(zero, &l_try);

      // result.value
      __ pop(edx);                                       // result
      __ mov(ecx, isolate()->factory()->value_string());  // "value"
      Handle<Code> value_ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(value_ic);                                  // result.value in eax
      context()->DropAndPlug(2, eax);                    // drop iter and g
      break;
    }
  }
}
   2086 
   2087 
// Resumes a suspended generator: rebuilds the generator's JavaScript frame
// (receiver, argument holes, saved context/function) and either jumps back
// into the generator code directly (fast path, NEXT with empty operand
// stack) or calls the runtime to restore operand stack and handlers.
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in eax, and is ultimately read by the resumed generator, as
  // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it.  ebx
  // will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(ebx);

  // Check generator state.
  // Executing and closed generators have non-positive continuation offsets,
  // so a single signed comparison rejects both.
  Label wrong_state, done;
  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
  __ cmp(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
         Immediate(Smi::FromInt(0)));
  __ j(less_equal, &wrong_state);

  // Load suspended function and context.
  __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
  __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));

  // Push receiver.
  __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));

  // Push holes for arguments to generator function.
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(edx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(ecx, isolate()->factory()->the_hole_value());
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  // edx counts down (as a Smi); the carry flag signals it went below zero.
  __ sub(edx, Immediate(Smi::FromInt(1)));
  __ j(carry, &push_frame);
  __ push(ecx);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame;
  __ bind(&push_frame);
  // The call pushes a return address so the resumed generator can return to
  // the code following this resume sequence.
  __ call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.

  // Load the operand stack size.
  __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
  __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
  __ SmiUntag(edx);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ cmp(edx, Immediate(0));
    __ j(not_zero, &slow_resume);
    // Compute the resume address: code entry plus the saved continuation
    // offset, then mark the generator as executing before jumping in.
    __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
    __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(ecx);
    __ add(edx, ecx);
    __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
           Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ jmp(edx);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ sub(edx, Immediate(1));
  __ j(carry, &call_resume);
  __ push(ecx);
  __ jmp(&push_operand_holes);
  __ bind(&call_resume);
  __ push(ebx);
  __ push(result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ Abort(kGeneratorFailedToResume);

  // Throw error if we attempt to operate on a running generator.
  __ bind(&wrong_state);
  __ push(ebx);
  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);

  __ bind(&done);
  context()->Plug(result_register());
}
   2182 
   2183 
// Allocates an iterator result object ({value, done}) in eax.  The value is
// popped from the top of the stack; 'done' is the compile-time constant
// passed in.  Falls back to a runtime allocation when new space is full.
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  Handle<Map> map(isolate()->native_context()->generator_result_map());

  __ Allocate(map->instance_size(), eax, ecx, edx, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&gc_required);
  __ Push(Smi::FromInt(map->instance_size()));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ mov(context_register(),
         Operand(ebp, StandardFrameConstants::kContextOffset));

  __ bind(&allocated);
  __ mov(ebx, map);
  __ pop(ecx);   // The yielded value (pushed by the caller).
  __ mov(edx, isolate()->factory()->ToBoolean(done));
  // The result object has exactly five words: map, properties, elements,
  // value, done.  Guard the layout assumed by the stores below.
  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset), ecx);
  __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset), edx);

  // Only the value field needs a write barrier, as the other values are in the
  // root set.
  __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset,
                      ecx, edx, kDontSaveFPRegs);
}
   2217 
   2218 
   2219 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   2220   SetSourcePosition(prop->position());
   2221   Literal* key = prop->key()->AsLiteral();
   2222   ASSERT(!key->value()->IsSmi());
   2223   __ mov(ecx, Immediate(key->value()));
   2224   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
   2225   CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
   2226 }
   2227 
   2228 
   2229 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   2230   SetSourcePosition(prop->position());
   2231   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
   2232   CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
   2233 }
   2234 
   2235 
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left,
                                              Expression* right) {
  // Emits an inlined fast path for binary operations on smi operands, with a
  // fallback to the generic BinaryOpStub.  The jump to the smi path is
  // recorded in a patch site so the IC system can later patch it.
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
  Label smi_case, done, stub_call;
  __ pop(edx);       // Left operand.
  __ mov(ecx, eax);  // Save the right operand; the or below clobbers eax.
  __ or_(eax, edx);  // Low (tag) bit is set iff either operand is non-smi.
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  // Generic path: stub expects left in edx and right in eax.
  __ bind(&stub_call);
  __ mov(eax, ecx);
  BinaryOpStub stub(op, mode);
  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  // Smi case.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      // Arithmetic right shift can never leave the smi range.
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of result necessary
      __ SmiTag(eax);
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
      __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
      // Out of smi range: re-tag the shift count (in ecx) before falling
      // back to the stub, which expects tagged operands.
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      // The unsigned result must fit in the non-negative smi range; any of
      // the top two bits set means it does not.
      __ test(eax, Immediate(0xc0000000));
      __ j(zero, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      // Tagged addition; overflow means the result is not a smi.
      __ add(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      // Untag one operand so the product is correctly tagged once.
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      // A zero result needs extra care: if either input was negative the
      // true result is -0, which is not representable as a smi.
      __ test(eax, eax);
      __ j(not_zero, &done, Label::kNear);
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      // Bitwise ops on tagged smis produce correctly tagged smis directly.
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}
   2331 
   2332 
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  // Emits a generic (non-inlined) binary operation via BinaryOpStub.  Left
  // operand is on the stack, right operand is in eax.
  __ pop(edx);  // Left operand into edx, as the stub expects.
  BinaryOpStub stub(op, mode);
  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(eax);
}
   2344 
   2345 
void FullCodeGenerator::EmitAssignment(Expression* expr) {
  // Stores the value in eax into the l-value |expr| (variable, named
  // property, or keyed property) and leaves the value in eax as the result.
  // Invalid left-hand sides are rewritten by the parser to have a 'throw
  // ReferenceError' on the left-hand side.
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      // StoreIC calling convention: value in eax, receiver in edx, name in
      // ecx.  The value must be saved around the receiver evaluation.
      __ push(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(edx, eax);
      __ pop(eax);  // Restore value.
      __ mov(ecx, prop->key()->AsLiteral()->value());
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
    case KEYED_PROPERTY: {
      // KeyedStoreIC calling convention: value in eax, receiver in edx,
      // key in ecx.
      __ push(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(ecx, eax);
      __ pop(edx);  // Receiver.
      __ pop(eax);  // Restore value.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(eax);
}
   2400 
   2401 
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  // Stores eax into the variable |var|.  |op| distinguishes a plain
  // assignment (Token::ASSIGN) from the initializing stores for const and
  // let bindings, which have different hole-check semantics.
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(ecx, var->name());
    __ mov(edx, GlobalObjectOperand());
    Handle<Code> ic = is_classic_mode()
        ? isolate()->builtins()->StoreIC_Initialize()
        : isolate()->builtins()->StoreIC_Initialize_Strict();
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);

  } else if (op == Token::INIT_CONST) {
    // Const initializers need a write barrier.
    ASSERT(!var->IsParameter());  // No const parameters.
    if (var->IsStackLocal()) {
      // Only store if the slot still holds the hole, i.e. the const has not
      // been initialized yet; later initializers are ignored.
      Label skip;
      __ mov(edx, StackOperand(var));
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &skip);
      __ mov(StackOperand(var), eax);
      __ bind(&skip);
    } else {
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
      // Like var declarations, const declarations are hoisted to function
      // scope.  However, unlike var initializers, const initializers are
      // able to drill a hole to that function context, even from inside a
      // 'with' context.  We thus bypass the normal static scope lookup for
      // var->IsContextSlot().
      __ push(eax);
      __ push(esi);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      // Dynamic lookup: store through the runtime.
      __ push(eax);  // Value.
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ push(Immediate(Smi::FromInt(language_mode())));
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      // If the slot still holds the hole, the let declaration has not been
      // executed yet, so the assignment is a reference error.
      Label assign;
      MemOperand location = VarOperand(var, ecx);
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &assign, Label::kNear);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
      __ bind(&assign);
      __ mov(location, eax);
      if (var->IsContextSlot()) {
        // Context slot stores need a write barrier; ecx still holds the
        // context object from VarOperand above.
        __ mov(edx, eax);
        int offset = Context::SlotOffset(var->index());
        __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
      }
    }

  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    // Assignment to var or initializing assignment to let/const
    // in harmony mode.
    if (var->IsStackAllocated() || var->IsContextSlot()) {
      MemOperand location = VarOperand(var, ecx);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ mov(edx, location);
        __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, kLetBindingReInitialization);
      }
      // Perform the assignment.
      __ mov(location, eax);
      if (var->IsContextSlot()) {
        __ mov(edx, eax);
        int offset = Context::SlotOffset(var->index());
        __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
      }
    } else {
      ASSERT(var->IsLookupSlot());
      // Dynamically introduced variable: store through the runtime.
      __ push(eax);  // Value.
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ push(Immediate(Smi::FromInt(language_mode())));
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    }
  }
  // Non-initializing assignments to consts are ignored.
}
   2491 
   2492 
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  // eax    : value
  // esp[0] : receiver

  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  ASSERT(prop->key()->AsLiteral() != NULL);

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(ecx, prop->key()->AsLiteral()->value());  // Property name.
  __ pop(edx);  // Receiver.
  // Choose the IC matching the current language mode (classic vs strict).
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->StoreIC_Initialize()
      : isolate()->builtins()->StoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}
   2514 
   2515 
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // eax               : value
  // esp[0]            : key
  // esp[kPointerSize] : receiver

  __ pop(ecx);  // Key.
  __ pop(edx);  // Receiver.
  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  // Choose the IC matching the current language mode (classic vs strict).
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}
   2534 
   2535 
void FullCodeGenerator::VisitProperty(Property* expr) {
  // Emits a property load, selecting the named or keyed IC depending on
  // whether the key is a property name.  The result ends up in eax.
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    // Named load: receiver in edx, name set up by EmitNamedPropertyLoad.
    VisitForAccumulatorValue(expr->obj());
    __ mov(edx, result_register());
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(eax);
  } else {
    // Keyed load: receiver in edx, key in ecx.
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ pop(edx);                     // Object.
    __ mov(ecx, result_register());  // Key.
    EmitKeyedPropertyLoad(expr);
    context()->Plug(eax);
  }
}
   2555 
   2556 
   2557 void FullCodeGenerator::CallIC(Handle<Code> code,
   2558                                RelocInfo::Mode rmode,
   2559                                TypeFeedbackId ast_id) {
   2560   ic_total_count_++;
   2561   __ call(code, rmode, ast_id);
   2562 }
   2563 
   2564 
   2565 
   2566 
void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  // Code common for calls using the IC.  Pushes the arguments, loads the
  // callee name into ecx, and invokes the call IC for the given arity.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
    __ Set(ecx, Immediate(name));  // Call IC expects the name in ecx.
  }
  // Record source position of the IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
  CallIC(ic, mode, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}
   2589 
   2590 
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  // Emits a call of the form receiver[key](...) through the keyed call IC.
  // Load the key.
  VisitForAccumulatorValue(key);

  // Swap the name of the function and the receiver on the stack to follow
  // the calling convention for call ICs.
  __ pop(ecx);
  __ push(eax);
  __ push(ecx);

  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position of the IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
  // Reload the key (below the arguments and receiver) into ecx for the IC.
  __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize));  // Key.
  CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, eax);  // Drop the key still on the stack.
}
   2621 
   2622 
void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
  // Code common for calls using the call stub.  The function and receiver
  // are already on the stack; this pushes the arguments and invokes
  // CallFunctionStub with call-target recording enabled.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());

  // Record call targets in unoptimized code.
  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
  __ mov(ebx, cell);  // The stub reads the type feedback cell from ebx.

  CallFunctionStub stub(arg_count, flags);
  // Load the function, located below the receiver and arguments, into edi.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub, expr->CallFeedbackId());

  RecordJSReturnSite(expr);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, eax);  // Drop the function; result is in eax.
}
   2652 
   2653 
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // Calls %ResolvePossiblyDirectEval with 5 arguments.  The first (a copy of
  // the function being called) has already been pushed by the caller; this
  // function pushes the remaining four.
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the receiver of the enclosing function.
  __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
  // Push the language mode.
  __ push(Immediate(Smi::FromInt(language_mode())));

  // Push the start position of the scope the calls resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
}
   2673 
   2674 
void FullCodeGenerator::VisitCall(Call* expr) {
  // Emits code for a call expression.  Dispatches on the form of the callee:
  // possible direct eval, global variable, lookup slot (dynamically scoped),
  // property access, or an arbitrary expression.
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function.  Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  VariableProxy* proxy = callee->AsVariableProxy();
  Property* property = callee->AsProperty();

  if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the call.
    // Then we call the resolved function using the given arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      // Reserved receiver slot.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ push(Operand(esp, (arg_count + 1) * kPointerSize));
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in eax (function) and
      // edx (receiver). Touch up the stack with the right values.
      __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
      __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
    }
    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
    // Load the resolved function (below receiver and arguments) into edi.
    __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, eax);

  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
    // Push global object as receiver for the call IC.
    __ push(GlobalObjectOperand());
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);

  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    // Call to a lookup slot (dynamically introduced variable).
    Label slow, done;
    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
    }
    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
    __ push(context_register());
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    __ push(eax);  // Function.
    __ push(edx);  // Receiver.

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing the hole to the call function stub.
      __ push(Immediate(isolate()->factory()->the_hole_value()));
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found by
    // LoadContextSlot. That object could be the hole if the receiver is
    // implicitly the global object.
    EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);

  } else if (property != NULL) {
    // Call of the form obj.name(...) or obj[key](...).
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr,
                     property->key()->AsLiteral()->value(),
                     RelocInfo::CODE_TARGET);
    } else {
      EmitKeyedCallWithIC(expr, property->key());
    }

  } else {
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    // Load global receiver object.
    __ mov(ebx, GlobalObjectOperand());
    __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
    // Emit function call.
    EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}
   2792 
   2793 
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  // Emits code for a 'new' expression via the CallConstructStub, recording
  // the call target for type feedback.
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into edi and eax.
  __ Set(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
  __ mov(ebx, cell);  // The stub reads the type feedback cell from ebx.

  CallConstructStub stub(RECORD_CALL_TARGET);
  __ call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(eax);
}
   2832 
   2833 
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  // Inlined %_IsSmi(x): tests the smi tag bit of the argument.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ test(eax, Immediate(kSmiTagMask));  // Tag bit clear means smi.
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   2853 
   2854 
void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  // Inlined %_IsNonNegativeSmi(x): the value is a non-negative smi iff both
  // the smi tag bit and the sign bit are clear.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ test(eax, Immediate(kSmiTagMask | 0x80000000));  // Tag and sign bits.
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   2874 
   2875 
void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  // Inlined %_IsObject(x): true for null and for non-undetectable heap
  // objects whose instance type is in the non-callable spec-object range.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, if_true);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, if_false);
  // Range check the instance type against the non-callable object types.
  __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ j(below, if_false);
  __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(below_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   2906 
   2907 
void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  // Inlined %_IsSpecObject(x): true for heap objects whose instance type is
  // at or above FIRST_SPEC_OBJECT_TYPE.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   2928 
   2929 
void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  // Inlined %_IsUndetectableObject(x): tests the map's undetectable bit.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
  __ test(ebx, Immediate(1 << Map::kIsUndetectable));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(not_zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   2952 
   2953 
// Inlined check used by the String wrapper fast path: returns true iff the
// object's map is (or can now be marked) "safe for default valueOf", i.e.
// the object has no own 'valueOf' property and its prototype is the
// unmodified String.prototype.  On success the map is mutated to cache the
// result (the kStringWrapperSafeForDefaultValueOf bit is set).
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // Evaluate the argument into eax; the caller guarantees a heap object
  // (asserted below).
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(eax);

  // Check whether this map has already been checked to be safe for default
  // valueOf.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
            1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ j(not_zero, if_true);

  // Check for fast case object. Return false for slow case objects.
  // (Dictionary-mode properties are identified by the hash table map.)
  __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ cmp(ecx, isolate()->factory()->hash_table_map());
  __ j(equal, if_false);

  // Look for valueOf string in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(ecx, ebx);
  __ cmp(ecx, 0);
  __ j(equal, &done);

  __ LoadInstanceDescriptors(ebx, ebx);
  // ebx: descriptor array.
  // ecx: valid entries in the descriptor array.
  // Calculate the end of the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kPointerSize == 4);
  // ecx = descriptor_count * kDescriptorSize; the times_2 scale below
  // converts entry count to a byte offset (entries are pointer-sized).
  __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
  __ lea(ecx, Operand(ebx, ecx, times_2, DescriptorArray::kFirstOffset));
  // Calculate location of the first key name.
  __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
  // Loop through all the keys in the descriptor array. If one of these is the
  // internalized string "valueOf" the result is false.
  __ jmp(&entry);
  __ bind(&loop);
  __ mov(edx, FieldOperand(ebx, 0));
  __ cmp(edx, isolate()->factory()->value_of_string());
  __ j(equal, if_false);
  // Advance to the key of the next descriptor.
  __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ cmp(ebx, ecx);
  __ j(not_equal, &loop);

  __ bind(&done);

  // Reload map as register ebx was used as temporary above.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));

  // If a valueOf property is not found on the object check that its
  // prototype is the un-modified String prototype. If not result is false.
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ JumpIfSmi(ecx, if_false);
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  // Fetch the native context's cached map of String.prototype and compare.
  __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edx,
         FieldOperand(edx, GlobalObject::kNativeContextOffset));
  __ cmp(ecx,
         ContextOperand(edx,
                        Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  __ j(not_equal, if_false);
  // Set the bit in the map to indicate that it has been checked safe for
  // default valueOf and set true result.
  __ or_(FieldOperand(ebx, Map::kBitField2Offset),
         Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ jmp(if_true);

  // Records bailout info for this expression; all emitted paths above have
  // already jumped, so no Split follows here.
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  context()->Plug(if_true, if_false);
}
   3042 
   3043 
// Inlined %_IsFunction(arg): true iff the value is a heap object with
// instance type JS_FUNCTION_TYPE.
void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // Evaluate the argument into eax.
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Smis are not functions.
  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   3064 
   3065 
// Inlined %_IsArray(arg): true iff the value is a heap object with
// instance type JS_ARRAY_TYPE.
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // Evaluate the argument into eax.
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Smis are not arrays.
  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   3086 
   3087 
// Inlined %_IsRegExp(arg): true iff the value is a heap object with
// instance type JS_REGEXP_TYPE.
void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // Evaluate the argument into eax.
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Smis are not regexps.
  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   3108 
   3109 
   3110 
// Inlined %_IsConstructCall(): true iff the current function was invoked as
// a constructor.  Determined by inspecting the calling frame's marker slot,
// skipping over an arguments-adaptor frame if one is present.
void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker);
  __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   3140 
   3141 
// Inlined %_ObjectEquals(a, b): pointer-identity comparison of the two
// values (no type coercion).
void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // First argument was pushed; pop it into ebx and compare with eax.
  __ pop(ebx);
  __ cmp(eax, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   3164 
   3165 
// Inlined %_Arguments(index): reads the indexed element of the current
// function's arguments via ArgumentsAccessStub.
void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in edx and the formal
  // parameter count in eax.
  VisitForAccumulatorValue(args->at(0));
  __ mov(edx, eax);
  __ Set(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  // Result of the stub call is in eax.
  context()->Plug(eax);
}
   3179 
   3180 
// Inlined %_ArgumentsLength(): yields the actual argument count as a smi.
// Defaults to the formal parameter count; if the caller went through an
// arguments adaptor frame, reads the real count from that frame instead.
void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label exit;
  // Get the number of formal parameters.
  __ Set(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  // Both paths leave a smi in eax.
  __ AssertSmi(eax);
  context()->Plug(eax);
}
   3202 
   3203 
// Inlined %_ClassOf(arg): computes the [[Class]]-style name of the value.
// Smis and non-spec objects yield null, callable spec objects yield
// "Function", objects with a non-function constructor yield "Object", and
// everything else yields the constructor's instance class name.
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  // Evaluate the argument into eax.
  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(eax, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
  // Map is now in eax.
  __ j(below, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  // Instance type == FIRST_SPEC_OBJECT_TYPE: callable at the low end.
  __ j(equal, &function);

  __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  // Instance type == LAST_SPEC_OBJECT_TYPE: callable at the high end.
  __ j(equal, &function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &non_function_constructor);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->function_class_string());
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}
   3263 
   3264 
// Inlined %_Log(type, format, args): conditionally emits a call to the
// kLog runtime function depending on the active logging flags, and always
// leaves undefined as the expression's value.
void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string.  Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 3);
  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
    // Only the format string and args array are passed to the runtime.
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kLog, 2);
  }
  // Finally, we're expected to leave a value on the top of the stack.
  __ mov(eax, isolate()->factory()->undefined_value());
  context()->Plug(eax);
}
   3284 
   3285 
// Inlined %_RandomHeapNumber(): allocates a heap number and fills it with a
// random double in [0, 1), built from 32 random bits obtained via a C call.
void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  // Fast path: inline allocation into edi (ebx/ecx are scratch).
  __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ mov(edi, eax);

  __ bind(&heapnumber_allocated);

  // Call the C random_uint32 function with the native context as argument.
  __ PrepareCallCFunction(1, ebx);
  __ mov(eax, ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
  __ mov(eax, FieldOperand(eax, GlobalObject::kNativeContextOffset));
  __ mov(Operand(esp, 0), eax);
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

  // Convert 32 random bits in eax to 0.(32 random bits) in a double
  // by computing:
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
  // This is implemented on both SSE2 and FPU.
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatureScope fscope(masm(), SSE2);
    __ mov(ebx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
    __ movd(xmm1, ebx);
    __ movd(xmm0, eax);
    __ cvtss2sd(xmm1, xmm1);
    // xorps merges the random mantissa bits into 1.0*2^20; subtracting
    // 1.0*2^20 leaves just the fractional random part.
    __ xorps(xmm0, xmm1);
    __ subsd(xmm0, xmm1);
    __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
  } else {
    // 0x4130000000000000 is 1.0 x 2^20 as a double.
    __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
           Immediate(0x41300000));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ fsubp(1);
    __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
  }
  // Result is the freshly written heap number.
  __ mov(eax, edi);
  context()->Plug(eax);
}
   3335 
   3336 
// Inlined %_SubString(string, from, to): delegates to SubStringStub with
// the three arguments on the stack.
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(eax);
}
   3348 
   3349 
// Inlined %_RegExpExec(regexp, subject, index, lastMatchInfo): delegates to
// RegExpExecStub with the four arguments on the stack.
void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(eax);
}
   3362 
   3363 
// Inlined %_ValueOf(obj): for a JSValue wrapper, returns the wrapped
// primitive; for any other value (including smis), returns it unchanged.
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(eax, &done, Label::kNear);
  // If the object is not a value type, return the object.
  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
  __ j(not_equal, &done, Label::kNear);
  // Unwrap: load the primitive stored in the JSValue.
  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(eax);
}
   3381 
   3382 
// Inlined %_DateField(date, index): reads a field of a JSDate object.
// index must be a literal smi.  Index 0 (the time value) is always read
// directly; other cached fields are read inline when the date cache stamp
// still matches, otherwise (or for uncached fields) a C function computes
// the value.  Throws if the receiver is not a JSDate.
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  ASSERT_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = eax;
  Register result = eax;
  Register scratch = ecx;

  // Type check: the receiver must be a JSDate.
  __ JumpIfSmi(object, &not_date_object);
  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
  __ j(not_equal, &not_date_object);

  if (index->value() == 0) {
    // Field 0 is the date's time value, stored directly on the object.
    __ mov(result, FieldOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      // Cached field: valid only while the global date cache stamp matches
      // the stamp recorded on this JSDate.
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ mov(scratch, Operand::StaticVariable(stamp));
      __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
      __ j(not_equal, &runtime, Label::kNear);
      __ mov(result, FieldOperand(object, JSDate::kValueOffset +
                                          kPointerSize * index->value()));
      __ jmp(&done);
    }
    // Slow path: compute the field via the C date-field function.
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch);
    __ mov(Operand(esp, 0), object);
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(result);
}
   3426 
   3427 
// Debug-mode sanity checks for the %_(One|Two)ByteSeqStringSetChar
// intrinsics: verifies index and value are smis, the index is within the
// string's bounds, and the string has the expected sequential encoding.
// Aborts (via Check) on any violation.  Preserves all three registers.
void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
                                                  Register index,
                                                  Register value,
                                                  uint32_t encoding_mask) {
  // Both index and value must be smis.
  __ test(index, Immediate(kSmiTagMask));
  __ Check(zero, kNonSmiIndex);
  __ test(value, Immediate(kSmiTagMask));
  __ Check(zero, kNonSmiValue);

  // 0 <= index < string.length (smi-to-smi comparisons).
  __ cmp(index, FieldOperand(string, String::kLengthOffset));
  __ Check(less, kIndexIsTooLarge);

  __ cmp(index, Immediate(Smi::FromInt(0)));
  __ Check(greater_equal, kIndexIsNegative);

  // Use 'value' as a scratch for the instance-type check, restoring it after.
  __ push(value);
  __ mov(value, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  // Representation + encoding bits must match exactly (sequential string of
  // the requested width).
  __ and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  __ cmp(value, Immediate(encoding_mask));
  __ Check(equal, kUnexpectedStringType);
  __ pop(value);
}
   3452 
   3453 
// Inlined %_OneByteSeqStringSetChar(string, index, value): writes a single
// byte into a sequential one-byte string in place and yields the string.
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(1));  // index
  VisitForStackValue(args->at(2));  // value
  __ pop(value);
  __ pop(index);
  VisitForAccumulatorValue(args->at(0));  // string


  if (FLAG_debug_code) {
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  // Convert the smi index and value to raw integers, then store the byte.
  __ SmiUntag(value);
  __ SmiUntag(index);
  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
   3480 
   3481 
// Inlined %_TwoByteSeqStringSetChar(string, index, value): writes a single
// 16-bit code unit into a sequential two-byte string in place and yields
// the string.
void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(1));  // index
  VisitForStackValue(args->at(2));  // value
  __ pop(value);
  __ pop(index);
  VisitForAccumulatorValue(args->at(0));  // string

  if (FLAG_debug_code) {
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
  }

  __ SmiUntag(value);
  // No need to untag a smi for two-byte addressing.
  // (A smi is value*2, which is exactly the byte offset for 2-byte chars.)
  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
   3507 
   3508 
// Inlined %_MathPow(base, exponent): uses MathPowStub when SSE2 is
// available, otherwise falls back to the kMath_pow runtime function.
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  if (CpuFeatures::IsSupported(SSE2)) {
    MathPowStub stub(MathPowStub::ON_STACK);
    __ CallStub(&stub);
  } else {
    __ CallRuntime(Runtime::kMath_pow, 2);
  }
  context()->Plug(eax);
}
   3524 
   3525 
// Inlined %_SetValueOf(obj, value): if obj is a JSValue wrapper, stores
// value into it (with write barrier); otherwise obj is left untouched.
// The expression's result is always 'value'.
void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(ebx);  // eax = value. ebx = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(ebx, &done, Label::kNear);

  // If the object is not a value type, return the value.
  __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
  __ j(not_equal, &done, Label::kNear);

  // Store the value.
  __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);

  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(edx, eax);
  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(eax);
}
   3553 
   3554 
// Inlined %_NumberToString(number): delegates to NumberToStringStub with
// the argument on the stack.
void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 1);

  // Load the argument on the stack and call the stub.
  VisitForStackValue(args->at(0));

  NumberToStringStub stub;
  __ CallStub(&stub);
  context()->Plug(eax);
}
   3566 
   3567 
// Inlined %_StringCharFromCode(code): builds a one-character string from a
// character code via StringCharFromCodeGenerator (fast path inline, slow
// path via the generator's runtime fallback).  Result lands in ebx.
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // Evaluate the character code into eax.
  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  // Slow-path code is emitted out of line after the jump above.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(ebx);
}
   3585 
   3586 
// Inlined %_StringCharCodeAt(string, index): yields the character code at
// the given index, NaN if the index is out of range, or undefined (to
// trigger conversion in the caller) if the index needs conversion.
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  // String was pushed above; index is already in eax.
  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Set(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Set(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  // Emit the generator's out-of-line slow path.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
   3631 
   3632 
// Inlined %_StringCharAt(string, index): yields the one-character string at
// the given index, the empty string if the index is out of range, or smi
// zero (to trigger conversion) if the index needs conversion.
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;

  // String was pushed above; index is already in eax.
  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Set(result, Immediate(isolate()->factory()->empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Set(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  // Emit the generator's out-of-line slow path.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
   3679 
   3680 
// Inlined %_StringAdd(left, right): delegates to StringAddStub (with full
// type checks on both operands) with the arguments on the stack.
void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringAddStub stub(STRING_ADD_CHECK_BOTH);
  __ CallStub(&stub);
  context()->Plug(eax);
}
   3692 
   3693 
// Inlined %_StringCompare(a, b): delegates to StringCompareStub with the
// two arguments on the stack.
void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(eax);
}
   3705 
   3706 
// Inlined %_MathSin(x): delegates to the transcendental cache stub (SIN,
// tagged-value variant) with the argument on the stack.
void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}
   3717 
   3718 
// Inlined %_MathCos(x): delegates to the transcendental cache stub (COS,
// tagged-value variant) with the argument on the stack.
void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}
   3729 
   3730 
// Inlined %_MathTan(x): delegates to the transcendental cache stub (TAN,
// tagged-value variant) with the argument on the stack.
void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::TAN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}
   3741 
   3742 
// Inlined %_MathLog(x): delegates to the transcendental cache stub (LOG,
// tagged-value variant) with the argument on the stack.
void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}
   3753 
   3754 
   3755 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
   3756   // Load the argument on the stack and call the runtime function.
   3757   ZoneList<Expression*>* args = expr->arguments();
   3758   ASSERT(args->length() == 1);
   3759   VisitForStackValue(args->at(0));
   3760   __ CallRuntime(Runtime::kMath_sqrt, 1);
   3761   context()->Plug(eax);
   3762 }
   3763 
   3764 
void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  // %_CallFunction(receiver, arg0, ..., argN, function): push the receiver
  // and the explicit arguments, load the callee into eax, then either invoke
  // it directly (if it is a plain JSFunction) or fall back to Runtime::kCall
  // (which also handles proxies and non-callables).
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  // Push receiver plus arg_count arguments (everything except the callee).
  for (int i = 0; i < arg_count + 1; ++i) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &runtime);

  // InvokeFunction requires the function in edi. Move it in there.
  __ mov(edi, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(edi, count, CALL_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
  // Restore the context register, which the callee may have changed.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  // Slow path: push the callee (receiver/args are already on the stack)
  // and let the runtime perform the call.
  __ push(eax);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(eax);
}
   3796 
   3797 
   3798 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
   3799   // Load the arguments on the stack and call the stub.
   3800   RegExpConstructResultStub stub;
   3801   ZoneList<Expression*>* args = expr->arguments();
   3802   ASSERT(args->length() == 3);
   3803   VisitForStackValue(args->at(0));
   3804   VisitForStackValue(args->at(1));
   3805   VisitForStackValue(args->at(2));
   3806   __ CallStub(&stub);
   3807   context()->Plug(eax);
   3808 }
   3809 
   3810 
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  // %_GetFromCache(cache_id, key): look up |key| in the JS-function result
  // cache selected by the compile-time-constant smi |cache_id|.  Fast path
  // checks only the entry at the cache's finger; any miss goes to the
  // runtime, which performs the full lookup (and may fill the cache).
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  // The cache id must be a literal smi, known at compile time.
  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    // Unknown cache id: abort in debug code, yield undefined otherwise.
    __ Abort(kAttemptToUseUndefinedCache);
    __ mov(eax, isolate()->factory()->undefined_value());
    context()->Plug(eax);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = eax;
  Register cache = ebx;
  Register tmp = ecx;
  // Navigate global object -> native context -> result caches -> this cache.
  __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
  __ mov(cache,
         FieldOperand(cache, GlobalObject::kNativeContextOffset));
  __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ mov(cache,
         FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  // tmp now holds finger offset as a smi.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
  // Fast path: does the key at the finger match?
  __ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp));
  __ j(not_equal, &not_found);

  // Hit: the cached value sits in the slot right after the key.
  __ mov(eax, CodeGenerator::FixedArrayElementOperand(cache, tmp, 1));
  __ jmp(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ push(cache);
  __ push(key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(eax);
}
   3858 
   3859 
void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
  // %_IsRegExpEquivalent(left, right): yields true_value in eax iff the two
  // operands are the same object, or both are JSRegExps with the same map
  // and the same data array (i.e. same pattern/flags); false_value otherwise.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  Register right = eax;
  Register left = ebx;
  Register tmp = ecx;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ pop(left);

  Label done, fail, ok;
  // Identical objects are trivially equivalent.
  __ cmp(left, right);
  __ j(equal, &ok);
  // Fail if either is a non-HeapObject.
  // (ANDing the two values has the smi tag bit clear only if at least one
  // operand is a smi, so a single smi check covers both.)
  __ mov(tmp, left);
  __ and_(tmp, right);
  __ JumpIfSmi(tmp, &fail);
  // Both must be JSRegExps with the same map...
  __ mov(tmp, FieldOperand(left, HeapObject::kMapOffset));
  __ CmpInstanceType(tmp, JS_REGEXP_TYPE);
  __ j(not_equal, &fail);
  __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
  __ j(not_equal, &fail);
  // ...and share the same regexp data array.
  __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
  __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
  __ j(equal, &ok);
  __ bind(&fail);
  __ mov(eax, Immediate(isolate()->factory()->false_value()));
  __ jmp(&done);
  __ bind(&ok);
  __ mov(eax, Immediate(isolate()->factory()->true_value()));
  __ bind(&done);

  context()->Plug(eax);
}
   3896 
   3897 
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  // %_HasCachedArrayIndex(string): test whether the string's hash field
  // has an array index cached in it, and plug the boolean outcome into
  // the current test/value context.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // The mask bit being clear means a cached array index is present.
  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   3920 
   3921 
   3922 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
   3923   ZoneList<Expression*>* args = expr->arguments();
   3924   ASSERT(args->length() == 1);
   3925   VisitForAccumulatorValue(args->at(0));
   3926 
   3927   __ AssertString(eax);
   3928 
   3929   __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
   3930   __ IndexFromHash(eax, eax);
   3931 
   3932   context()->Plug(eax);
   3933 }
   3934 
   3935 
void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  // %_FastAsciiArrayJoin(array, separator): fast path for Array.join on an
  // array of flat sequential one-byte strings with a flat one-byte
  // separator.  Any precondition failure jumps to |bailout|, which yields
  // undefined so the caller can fall back to the generic join.
  // Three specialized copy loops are used depending on separator length
  // (empty / one character / longer).
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;

  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // Load this to eax (= array)
  VisitForAccumulatorValue(args->at(0));
  // All aliases of the same register have disjoint lifetimes.
  Register array = eax;
  Register elements = no_reg;  // Will be eax.

  Register index = edx;

  Register string_length = ecx;

  Register string = esi;

  Register scratch = ebx;

  Register array_length = edi;
  Register result_pos = no_reg;  // Will be edi.

  // Separator operand is already pushed.
  // Two extra stack words are reserved below for the result string and the
  // untagged array length (the registers are all needed for the loops).
  Operand separator_operand = Operand(esp, 2 * kPointerSize);
  Operand result_operand = Operand(esp, 1 * kPointerSize);
  Operand array_length_operand = Operand(esp, 0);
  __ sub(esp, Immediate(2 * kPointerSize));
  // Clear the direction flag for the forward string copies below.
  __ cld();
  // Check that the array is a JSArray
  __ JumpIfSmi(array, &bailout);
  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, &bailout);

  // If the array has length zero, return the empty string.
  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ j(not_zero, &non_trivial_array);
  __ mov(result_operand, isolate()->factory()->empty_string());
  __ jmp(&done);

  // Save the array length.
  __ bind(&non_trivial_array);
  __ mov(array_length_operand, array_length);

  // Save the FixedArray containing array's elements.
  // End of array's live range.
  elements = array;
  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
  array = no_reg;


  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ Set(index, Immediate(0));
  __ Set(string_length, Immediate(0));
  // Loop condition: while (index < length).
  // Live loop registers: index, array_length, string,
  //                      scratch, string_length, elements.
  if (generate_debug_code_) {
    __ cmp(index, array_length);
    __ Assert(less, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
  }
  __ bind(&loop);
  __ mov(string, FieldOperand(elements,
                              index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ JumpIfSmi(string, &bailout);
  // Bail out unless the element is a sequential one-byte string.
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);
  // Accumulate smi-encoded lengths; overflow means the result would be
  // too long, so bail out.
  __ add(string_length,
         FieldOperand(string, SeqOneByteString::kLengthOffset));
  __ j(overflow, &bailout);
  __ add(index, Immediate(1));
  __ cmp(index, array_length);
  __ j(less, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, 1);
  __ j(not_equal, &not_size_one_array);
  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
  __ mov(result_operand, scratch);
  __ jmp(&done);

  __ bind(&not_size_one_array);

  // End of array_length live range.
  result_pos = array_length;
  array_length = no_reg;

  // Live registers:
  // string_length: Sum of string lengths, as a smi.
  // elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ mov(string, separator_operand);
  __ JumpIfSmi(string, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);

  // Add (separator length times array_length) - separator length
  // to string_length.
  __ mov(scratch, separator_operand);
  __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
  __ sub(string_length, scratch);  // May be negative, temporarily.
  __ imul(scratch, array_length_operand);
  __ j(overflow, &bailout);
  __ add(string_length, scratch);
  __ j(overflow, &bailout);

  // Untag the total length (smi tag is one bit).
  __ shr(string_length, 1);
  // Live registers and stack values:
  //   string_length
  //   elements
  __ AllocateAsciiString(result_pos, string_length, scratch,
                         index, string, &bailout);
  // Save the result string; result_pos then points at its first character.
  __ mov(result_operand, result_pos);
  __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));


  // Dispatch on separator length: empty, one character, or longer.
  __ mov(string, separator_operand);
  __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
         Immediate(Smi::FromInt(1)));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);


  // Empty separator case
  __ mov(index, Immediate(0));
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < length).
  __ bind(&loop_1);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   elements: the FixedArray of strings we are joining.

  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));
  __ bind(&loop_1_condition);
  __ cmp(index, array_length_operand);
  __ j(less, &loop_1);  // End while (index < length).
  __ jmp(&done);



  // One-character separator case
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ mov_b(separator_operand, scratch);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator character to the result.
  __ mov_b(scratch, separator_operand);
  __ mov_b(Operand(result_pos, 0), scratch);
  __ inc(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);


  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator to the result.
  __ mov(string, separator_operand);
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&loop_3_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_3);  // End while (index < length).
  __ jmp(&done);


  // Bailout: produce undefined so the caller falls back to the generic join.
  __ bind(&bailout);
  __ mov(result_operand, isolate()->factory()->undefined_value());
  __ bind(&done);
  __ mov(eax, result_operand);
  // Drop temp values from the stack, and restore context register.
  __ add(esp, Immediate(3 * kPointerSize));

  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}
   4198 
   4199 
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  // Emit a runtime call.  Names starting with '_' are inline runtime calls
  // handled specially; otherwise dispatch to either a JS builtin (via a
  // call IC) or a C++ runtime function.
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function: push the builtins object,
    // which acts as the receiver for the IC call below.
    __ mov(eax, GlobalObjectOperand());
    __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function via a call IC.
    __ Set(ecx, Immediate(expr->name()));
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
    Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
    CallIC(ic, mode, expr->CallRuntimeFeedbackId());
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  } else {
    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(eax);
}
   4238 
   4239 
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  // Emit code for the unary operators delete, void, !, and typeof.
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        // delete obj[key]: push object, key and the strictness flag, then
        // let the DELETE builtin do the work.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
            ? kNonStrictMode : kStrictMode;
        __ push(Immediate(Smi::FromInt(strict_mode_flag)));
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
        if (var->IsUnallocated()) {
          // Global variable: delete the property from the global object.
          __ push(GlobalObjectOperand());
          __ push(Immediate(var->name()));
          __ push(Immediate(Smi::FromInt(kNonStrictMode)));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      // void expr: evaluate for side effects only, result is undefined.
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        // Note the swapped labels: the subexpression's true branch
        // materializes false for NOT, and vice versa.
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      // typeof expr: evaluate in typeof-value mode (no reference error for
      // unresolved globals), then let the runtime classify the value.
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}
   4354 
   4355 
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  // Emit code for prefix/postfix ++ and --.  The operand may be a variable,
  // a named property, or a keyed property; postfix forms additionally keep
  // the original (to-number-converted) value for the expression's result.
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ push(Immediate(Smi::FromInt(0)));
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in edx.
      VisitForAccumulatorValue(prop->obj());
      __ push(eax);
      __ mov(edx, eax);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForStackValue(prop->key());
      __ mov(edx, Operand(esp, kPointerSize));  // Object.
      __ mov(ecx, Operand(esp, 0));             // Key.
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  if (ShouldInlineSmiCase(expr->op())) {
    __ JumpIfSmi(eax, &no_conversion, Label::kNear);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
      }
    }
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);

  if (ShouldInlineSmiCase(expr->op())) {
    // Smi fast path: add/subtract the smi constant 1 directly.
    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(overflow, &stub_call, Label::kNear);
    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(eax, &done, Label::kNear);

    __ bind(&stub_call);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
  CallIC(stub.GetCode(isolate()),
         RelocInfo::CODE_TARGET,
         expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      // Store via a (strictness-appropriate) StoreIC; receiver in edx,
      // name in ecx, value in eax.
      __ mov(ecx, prop->key()->AsLiteral()->value());
      __ pop(edx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      // Store via a KeyedStoreIC; key in ecx, receiver in edx, value in eax.
      __ pop(ecx);
      __ pop(edx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}
   4539 
   4540 
// Loads the value of the operand of a typeof expression into eax.
// Unlike an ordinary variable load, this must not throw a reference
// error for an unresolvable variable, so globals use a non-contextual
// load and lookup slots fall back to a no-reference-error runtime call.
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  // typeof always needs the value, so Effect/Test contexts cannot occur.
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    // Set up the load IC's inputs: receiver in edx, name in ecx.
    __ mov(edx, GlobalObjectOperand());
    __ mov(ecx, Immediate(proxy->name()));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallIC(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(eax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Slow case: pass the current context (esi) and the variable name
    // to a runtime function that does not throw on a missing binding.
    __ push(esi);
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(eax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}
   4576 
   4577 
// Emits an inlined check for the pattern `typeof sub_expr == "literal"`
// (and its negated/strict variants, handled by the caller via the test
// context). Branches directly to if_true/if_false without materializing
// the typeof result string.
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Evaluate the operand into eax, without reference errors.
  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_string())) {
    // Smis and heap numbers are both "number".
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    __ j(above_equal, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_string())) {
    // Only the two boolean oddballs are "boolean".
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_string())) {
    // With harmony typeof, typeof null is "null" rather than "object".
    __ cmp(eax, isolate()->factory()->null_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_string())) {
    __ cmp(eax, isolate()->factory()->undefined_value());
    __ j(equal, if_true);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(ecx, Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_string())) {
    __ JumpIfSmi(eax, if_false);
    // Both plain functions and function proxies are "function".
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
    __ j(equal, if_true);
    __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_string())) {
    __ JumpIfSmi(eax, if_false);
    if (!FLAG_harmony_typeof) {
      // Classic mode: typeof null is "object".
      __ cmp(eax, isolate()->factory()->null_value());
      __ j(equal, if_true);
    }
    // Non-callable spec objects report "object".
    __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
    __ j(below, if_false);
    __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else {
    // Unknown typeof string literal: the comparison is always false.
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
   4654 
   4655 
// Compiles a comparison expression (==, <, in, instanceof, ...) for its
// control flow, then packs the outcome into the expression's context.
// Literal comparisons (typeof x == "...", x == null) are inlined via
// TryLiteralCompare; generic comparisons go through a patchable
// smi fast path plus the compare IC.
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      // The IN builtin returns the true/false oddball in eax.
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      // Generic relational/equality compare: left on the stack,
      // right in the accumulator.
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ pop(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        // Fast path: if both operands are smis, compare them directly.
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
   4728 
   4729 
// Emits an inlined comparison of sub_expr against the nil literal
// (null or undefined). A strict equality compares against the nil
// oddball directly; a non-strict one goes through the CompareNilIC.
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    // Strict equality: only the exact nil oddball matches.
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    // Non-strict: delegate to the IC (nonzero result means "equal").
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
    __ test(eax, eax);
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
   4757 
   4758 
// Loads the currently executing JSFunction from the stack frame slot
// and plugs it into the expression context.
void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}
   4763 
   4764 
   4765 Register FullCodeGenerator::result_register() {
   4766   return eax;
   4767 }
   4768 
   4769 
   4770 Register FullCodeGenerator::context_register() {
   4771   return esi;
   4772 }
   4773 
   4774 
// Stores |value| into the current stack frame at |frame_offset| bytes
// relative to the frame pointer (ebp).
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  // Frame slots must be pointer-size aligned.
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}
   4779 
   4780 
// Loads slot |context_index| of the current context (held in esi)
// into |dst|.
void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}
   4784 
   4785 
// Pushes the closure argument used when allocating a new context:
// a smi sentinel for global/module scopes, the enclosing context's
// closure for eval scopes, and the frame's function otherwise.
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ push(Immediate(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}
   4805 
   4806 
   4807 // ----------------------------------------------------------------------------
   4808 // Non-local control flow support.
   4809 
// Saves state that must survive the finally block: the (cooked) return
// address, the result register, and the isolate's pending-message
// state. ExitFinallyBlock pops these in exactly the reverse order.
void FullCodeGenerator::EnterFinallyBlock() {
  // Cook return address on top of stack (smi encoded Code* delta)
  ASSERT(!result_register().is(edx));
  __ pop(edx);
  // Make the address code-relative so it stays valid if the code object
  // moves, then smi-tag it so the GC treats it as data.
  __ sub(edx, Immediate(masm_->CodeObject()));
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTag(edx);
  __ push(edx);

  // Store result register while executing finally block.
  __ push(result_register());

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  __ push(edx);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(edx, Operand::StaticVariable(has_pending_message));
  // Smi-tag the flag so the value on the stack is GC-safe.
  __ SmiTag(edx);
  __ push(edx);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_script));
  __ push(edx);
}
   4840 
   4841 
// Restores the state saved by EnterFinallyBlock (in reverse push order)
// and returns by jumping to the uncooked return address.
void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(edx));
  // Restore pending message from stack.
  __ pop(edx);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(Operand::StaticVariable(pending_message_script), edx);

  __ pop(edx);
  // Undo the smi-tagging done when the flag was saved.
  __ SmiUntag(edx);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(Operand::StaticVariable(has_pending_message), edx);

  __ pop(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook return address.
  __ pop(edx);
  __ SmiUntag(edx);
  // Re-add the code object base to turn the code-relative offset back
  // into an absolute address, then return via an indirect jump.
  __ add(edx, Immediate(masm_->CodeObject()));
  __ jmp(edx);
}
   4870 
   4871 
   4872 #undef __
   4873 
   4874 #define __ ACCESS_MASM(masm())
   4875 
// Unwinds one try-finally level during non-local control flow (break,
// continue, return through a finally). Drops the stack down to the
// handler, restores the context, pops the handler, and calls the
// finally code; on return, the caller continues unwinding from the
// previous nesting level with a clean stack (depth and context reset).
FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset));
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  }
  __ PopTryHandler();
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}
   4898 
   4899 #undef __
   4900 
   4901 } }  // namespace v8::internal
   4902 
   4903 #endif  // V8_TARGET_ARCH_IA32
   4904