Home | History | Annotate | Download | only in ia32
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/v8.h"
      6 
      7 #if V8_TARGET_ARCH_IA32
      8 
      9 #include "src/code-stubs.h"
     10 #include "src/codegen.h"
     11 #include "src/compiler.h"
     12 #include "src/debug.h"
     13 #include "src/full-codegen.h"
     14 #include "src/isolate-inl.h"
     15 #include "src/parser.h"
     16 #include "src/scopes.h"
     17 #include "src/stub-cache.h"
     18 
     19 namespace v8 {
     20 namespace internal {
     21 
     22 #define __ ACCESS_MASM(masm_)
     23 
     24 
// Records the position of a patchable smi-check jump emitted by the full
// code generator. The IC machinery later rewrites the branch in place
// (jc <-> jz, jnc <-> jnz) when the inlined smi code is patched. Each site
// records at most one jump, and EmitPatchInfo must be called before the
// site is destroyed.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    // A recorded jump must have had its patch info emitted, and vice versa.
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // Branch to |target| if |reg| is not a smi. The x86 "test" instruction
  // always clears the carry flag, so before patching the not_carry branch
  // is unconditionally taken.
  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  // Branch to |target| if |reg| is a smi. With carry always cleared by
  // "test", this branch is never taken until the site is patched.
  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  // Emit a marker instruction after the IC call: a "test eax, <delta>"
  // whose 8-bit immediate records the distance back to the patch site
  // (presumably consumed by the IC patching code — the is_uint8 assert
  // pins the encoding). A plain nop signals that no inlined smi code was
  // emitted for this site.
  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      ASSERT(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    ASSERT(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;  // Whether EmitPatchInfo has run for this site.
#endif
};
     79 
     80 
     81 // Generate code for a JS function.  On entry to the function the receiver
     82 // and arguments have been pushed on the stack left to right, with the
     83 // return address on top of them.  The actual argument count matches the
     84 // formal parameter count expected by the function.
     85 //
     86 // The live registers are:
     87 //   o edi: the JS function object being called (i.e. ourselves)
     88 //   o esi: our context
     89 //   o ebp: our caller's frame pointer
     90 //   o esp: stack pointer (pointing to return address)
     91 //
     92 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
     93 // frames-ia32.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  // Pre-allocate the exception handler table in old space.
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  // The profiling counter starts at the full interrupt budget; it is
  // decremented on back edges and returns (see EmitProfilingCounterDecrement).
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  // Support for --stop-at: break into the debugger on entry to the named
  // function.
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));

    __ cmp(ecx, isolate()->factory()->undefined_value());
    __ j(not_equal, &ok, Label::kNear);

    __ mov(ecx, GlobalObjectOperand());
    __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset));

    __ mov(Operand(esp, receiver_offset), ecx);

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      // For large frames, verify against the real stack limit before
      // pushing to avoid overflowing the stack while initializing locals.
      if (locals_count >= 128) {
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      // Push undefined in unrolled batches of kMaxPushes inside a
      // count-down loop (ecx holds the iteration count), then emit the
      // remainder as straight-line pushes.
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i  = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    // Argument to NewContext is the function, which is still in edi.
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(edi);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(edi);
      __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
    }
    // All three paths above consume edi (the closure).
    function_in_register = false;
    // Context is returned in eax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi,
                                    context_offset,
                                    eax,
                                    ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(edi);
    } else {
      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ lea(edx,
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(edx);
    __ push(Immediate(Smi::FromInt(num_parameters)));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      ExternalReference stack_limit
          = ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}
    322 
    323 
// Overwrite the accumulator register (eax) with smi zero.
void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::FromInt(0)));
}
    327 
    328 
// Subtract |delta| (as a smi) from the profiling counter cell; the caller
// branches on the resulting sign to decide whether to call InterruptCheck.
// Clobbers ebx.
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}
    334 
    335 
    336 void FullCodeGenerator::EmitProfilingCounterReset() {
    337   int reset_value = FLAG_interrupt_budget;
    338   __ mov(ebx, Immediate(profiling_counter_));
    339   __ mov(FieldOperand(ebx, Cell::kValueOffset),
    340          Immediate(Smi::FromInt(reset_value)));
    341 }
    342 
    343 
// Emitted at every loop back edge: decrement the profiling counter by a
// weight proportional to the loop body size and, if it underflows, call the
// InterruptCheck builtin (which may trigger on-stack replacement).
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  ASSERT(back_edge_target->is_bound());
  // Weight the decrement by the code distance back to the loop header,
  // clamped to [1, kMaxBackEdgeWeight].
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
    371 
    372 
// Emit the function's (single, shared) return sequence. The first call
// binds return_label_ and emits the sequence; later calls just jump to it.
// The instruction bytes of the final return are length-checked because the
// debugger patches them in place.
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ j(positive, &ok, Label::kNear);
    // Preserve the return value (eax) across the interrupt call.
    __ push(eax);
    __ call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(eax);
    EmitProfilingCounterReset();
    __ bind(&ok);
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    SetSourcePosition(function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ mov(esp, ebp);
    int no_frame_start = masm_->pc_offset();
    __ pop(ebp);

    // Pop receiver + parameters in one ret; ecx is the scratch register
    // used by Ret to hold the return address.
    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, ecx);
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}
    424 
    425 
// A variable read in effect context produces no value and no code.
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}
    429 
    430 
// Load the variable's value into the accumulator (result register).
void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}
    435 
    436 
// Push the variable's value onto the expression stack.
void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  __ push(operand);
}
    443 
    444 
// Load the variable into the accumulator and branch on its boolean value.
void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
    451 
    452 
// Root-list-indexed plugging is not used by the IA32 full codegen.
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
    456 
    457 
// Root-list-indexed plugging is not used by the IA32 full codegen.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
    462 
    463 
// Root-list-indexed plugging is not used by the IA32 full codegen.
void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
    468 
    469 
// Root-list-indexed plugging is not used by the IA32 full codegen.
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
    473 
    474 
// A literal in effect context produces no value: nothing to emit.
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}
    477 
    478 
// Materialize the literal into the accumulator. Smi immediates go through
// SafeMove (presumably to avoid embedding attacker-chosen immediates
// verbatim in code — confirm against MacroAssembler::SafeMove).
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Immediate(lit));
  } else {
    __ Move(result_register(), Immediate(lit));
  }
}
    487 
    488 
// Push the literal onto the expression stack; smi immediates go through
// SafePush (same rationale as SafeMove in the accumulator variant).
void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}
    496 
    497 
// Branch directly on a literal's statically-known boolean value where
// possible; fall back to a runtime ToBoolean test otherwise.
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    // Statically falsy literals.
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    // Statically truthy literals (objects are always truthy).
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    // Strings: only the empty string is falsy.
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    // Smis: only zero is falsy.
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}
    526 
    527 
// Drop |count| stack entries; in effect context the value in |reg| is
// simply discarded.
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}
    533 
    534 
// Drop |count| stack entries and leave |reg|'s value in the accumulator.
void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}
    542 
    543 
// Replace the top |count| stack entries with the value in |reg|: drop all
// but one slot and overwrite the remaining top-of-stack slot in place.
void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);
}
    550 
    551 
// Drop |count| stack entries, move |reg| into the accumulator, and branch
// on its boolean value.
void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
    561 
    562 
// In effect context both outcomes are equivalent; bind the single shared
// label and emit no value.
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}
    568 
    569 
// Materialize true/false into the accumulator depending on which label the
// preceding test jumped to.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}
    581 
    582 
// Push true/false onto the expression stack depending on which label the
// preceding test jumped to.
void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}
    594 
    595 
// In test context the branch targets are already this context's own
// true/false labels, so there is nothing to materialize.
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}
    601 
    602 
// A boolean constant in effect context produces no value: nothing to emit.
void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}
    605 
    606 
    607 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
    608   Handle<Object> value = flag
    609       ? isolate()->factory()->true_value()
    610       : isolate()->factory()->false_value();
    611   __ mov(result_register(), value);
    612 }
    613 
    614 
    615 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
    616   Handle<Object> value = flag
    617       ? isolate()->factory()->true_value()
    618       : isolate()->factory()->false_value();
    619   __ push(Immediate(value));
    620 }
    621 
    622 
// Branch unconditionally to the outcome label matching the constant,
// unless that label is the fall-through.
void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}
    634 
    635 
// Convert the value in the accumulator to a boolean via the ToBoolean IC
// stub, then split control flow on the stub's (nonzero == true) result.
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ test(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}
    646 
    647 
    648 void FullCodeGenerator::Split(Condition cc,
    649                               Label* if_true,
    650                               Label* if_false,
    651                               Label* fall_through) {
    652   if (if_false == fall_through) {
    653     __ j(cc, if_true);
    654   } else if (if_true == fall_through) {
    655     __ j(NegateCondition(cc), if_false);
    656   } else {
    657     __ j(cc, if_true);
    658     __ jmp(if_false);
    659   }
    660 }
    661 
    662 
    663 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
    664   ASSERT(var->IsStackAllocated());
    665   // Offset is negative because higher indexes are at lower addresses.
    666   int offset = -var->index() * kPointerSize;
    667   // Adjust by a (parameter or local) base offset.
    668   if (var->IsParameter()) {
    669     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
    670   } else {
    671     offset += JavaScriptFrameConstants::kLocal0Offset;
    672   }
    673   return Operand(ebp, offset);
    674 }
    675 
    676 
    677 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
    678   ASSERT(var->IsContextSlot() || var->IsStackAllocated());
    679   if (var->IsContextSlot()) {
    680     int context_chain_length = scope()->ContextChainLength(var->scope());
    681     __ LoadContext(scratch, context_chain_length);
    682     return ContextOperand(scratch, var->index());
    683   } else {
    684     return StackOperand(var);
    685   }
    686 }
    687 
    688 
// Load the value of |var| into |dest|. |dest| doubles as the scratch
// register for the context-chain walk inside VarOperand, which is fine
// because the final mov overwrites it.
void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}
    694 
    695 
// Store |src| into |var|, emitting a write barrier when the variable lives
// in a (heap-allocated) context. |scratch0| and |scratch1| must be distinct
// from |src| and from each other; all three must differ from esi, which
// holds the current context.
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}
    714 
    715 
// Record a bailout point for |expr| with the result on top of stack, taken
// just before a Split. When |should_normalize| is set, the deopt re-entry
// path re-tests the (normalized) accumulator against true and re-splits to
// |if_true|/|if_false|; the normal path jumps over that code via |skip|.
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}
    734 
    735 
// Debug-mode check that the current context (esi) is a declaration
// context, i.e. not a with- or catch-context, by inspecting its map.
// Clobbers ebx. No code is emitted in release builds.
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}
    748 
    749 
// Emit the declaration of a variable, dispatching on where it was
// allocated: globals are accumulated into globals_ for a later bulk
// declaration, stack/context slots are hole-initialized when the mode
// requires it, and LOOKUP slots are declared through the runtime.
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  // let/const bindings start out holding the hole until initialized.
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // Global: record name and initial value (hole or undefined) in the
      // pending-globals list; no code is emitted here.
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(), zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      // Runtime call arguments: context, name, attributes, initial value.
      __ push(esi);
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ push(Immediate(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}
    810 
    811 
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  // Emit code (or record data) binding a declared function to its variable.
  // Unlike plain variable declarations, the function value is materialized
  // eagerly here.
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      // Global function: record name and compiled SharedFunctionInfo in
      // globals_ for the batched DeclareGlobals runtime call.
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      // Stack slot: evaluate the closure into eax and store it.
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case Variable::CONTEXT: {
      // Context slot: store the closure and emit the write barrier for the
      // context store (ecx is clobbered as scratch).
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi,
                                Context::SlotOffset(variable->index()),
                                result_register(),
                                ecx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      // Dynamic binding: declare via runtime call with
      // (context, name, attributes, closure) as arguments.
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      __ push(Immediate(Smi::FromInt(NONE)));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}
    863 
    864 
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  // Bind a module's instance object to its context-allocated variable and
  // then generate code for the module body.
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  // The instance lives in the global scope's context at the interface's
  // index; its extension slot holds the instance object itself.
  __ LoadContext(eax, scope_->ContextChainLength(scope_->GlobalScope()));
  __ mov(eax, ContextOperand(eax, variable->interface()->Index()));
  __ mov(eax, ContextOperand(eax, Context::EXTENSION_INDEX));

  // Assign it.
  __ mov(ContextOperand(esi, variable->index()), eax);
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(esi,
                            Context::SlotOffset(variable->index()),
                            eax,
                            ecx,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}
    893 
    894 
    895 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
    896   VariableProxy* proxy = declaration->proxy();
    897   Variable* variable = proxy->var();
    898   switch (variable->location()) {
    899     case Variable::UNALLOCATED:
    900       // TODO(rossberg)
    901       break;
    902 
    903     case Variable::CONTEXT: {
    904       Comment cmnt(masm_, "[ ImportDeclaration");
    905       EmitDebugCheckDeclarationContext(variable);
    906       // TODO(rossberg)
    907       break;
    908     }
    909 
    910     case Variable::PARAMETER:
    911     case Variable::LOCAL:
    912     case Variable::LOOKUP:
    913       UNREACHABLE();
    914   }
    915 }
    916 
    917 
void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // Export declarations are not yet implemented; intentionally a no-op.
  // TODO(rossberg)
}
    921 
    922 
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Declare all batched global name/value pairs (collected by the
  // declaration visitors above) in a single runtime call.
  // Call the runtime to declare the globals.
  __ push(esi);  // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
  // Return value is ignored.
}
    931 
    932 
void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Declare all modules described in 'descriptions' via one runtime call.
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
  // Return value is ignored.
}
    939 
    940 
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  // Compile a switch as a chain of '===' comparisons against the tag value
  // (kept on the stack), followed by the case bodies in source order.
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      // Fast path: if both tag (edx) and label (eax) are smis, compare
      // them directly and skip the CompareIC call.
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    // The jmp makes the compare-against-true sequence unreachable in normal
    // execution; it is only entered via the bailout recorded just below
    // (NOTE(review): presumably when resuming here after deoptimization).
    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    // Normal path: the CompareIC leaves zero in eax on equality.
    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
   1032 
   1033 
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  // Compile for-in using a 5-slot stack frame layout (from top of stack):
  //   [0] current index (smi)
  //   [1] array length (smi)
  //   [2] enum cache / fixed array of keys
  //   [3] expected map (or smi 0 for the slow/proxy case)
  //   [4] the enumerated object
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();

  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(eax);

  // Check for proxies.
  Label call_runtime, use_cache, fixed_array;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  // The runtime returns either a map (fast case, use the enum cache) or a
  // fixed array of keys (slow case), distinguished by the meta map check.
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  // Empty enum cache: drop the pushed object and skip the loop entirely.
  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  // No need for a write barrier, we are storing a Smi in the feedback vector.
  __ LoadHeapObject(ebx, FeedbackVector());
  __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(slot)),
         Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate())));

  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check
  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
  __ j(above, &non_proxy);
  __ Move(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ push(ebx);  // Smi
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  ASSERT(Smi::FromInt(0) == 0);
  __ test(edx, edx);
  __ j(zero, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ test(eax, eax);
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
   1201 
   1202 
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  // Compile for-of by delegating the iterator protocol steps to the
  // desugared sub-expressions attached to the statement node.
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterable = subject
  VisitForAccumulatorValue(stmt->assign_iterable());

  // As with for-in, skip the loop if the iterator is null or undefined.
  __ CompareRoot(eax, Heap::kUndefinedValueRootIndex);
  __ j(equal, loop_statement.break_label());
  __ CompareRoot(eax, Heap::kNullValueRootIndex);
  __ j(equal, loop_statement.break_label());

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
   1252 
   1253 
   1254 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
   1255                                        bool pretenure) {
   1256   // Use the fast case closure allocation code that allocates in new
   1257   // space for nested functions that don't need literals cloning. If
   1258   // we're running with the --always-opt or the --prepare-always-opt
   1259   // flag, we need to use the runtime function so that the new function
   1260   // we are creating here gets a chance to have its code optimized and
   1261   // doesn't just get a copy of the existing unoptimized code.
   1262   if (!FLAG_always_opt &&
   1263       !FLAG_prepare_always_opt &&
   1264       !pretenure &&
   1265       scope()->is_function_scope() &&
   1266       info->num_literals() == 0) {
   1267     FastNewClosureStub stub(isolate(),
   1268                             info->strict_mode(),
   1269                             info->is_generator());
   1270     __ mov(ebx, Immediate(info));
   1271     __ CallStub(&stub);
   1272   } else {
   1273     __ push(esi);
   1274     __ push(Immediate(info));
   1275     __ push(Immediate(pretenure
   1276                       ? isolate()->factory()->true_value()
   1277                       : isolate()->factory()->false_value()));
   1278     __ CallRuntime(Runtime::kHiddenNewClosure, 3);
   1279   }
   1280   context()->Plug(eax);
   1281 }
   1282 
   1283 
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  // A variable reference compiles to a plain variable load.
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}
   1288 
   1289 
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  // Load a global variable, but first verify at runtime that no context on
  // the chain has an extension object (which a sloppy eval could have
  // introduced to shadow the global).  Jumps to 'slow' if any check fails.
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Inside an eval scope the remaining chain length is unknown at compile
    // time, so emit a runtime loop checking every context up to the native
    // context.
    // Loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ mov(edx, GlobalObjectOperand());
  __ mov(ecx, var->name());
  // Inside typeof, an unresolved global must not throw, hence NOT_CONTEXTUAL.
  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;

  CallLoadIC(mode);
}
   1348 
   1349 
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  // Return an operand addressing 'var's context slot, emitting runtime
  // checks that no intervening context has an extension object (a sloppy
  // eval could have introduced one to shadow the binding).  Jumps to 'slow'
  // if any check fails.  May leave the walked context in ebx.
  ASSERT(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}
   1378 
   1379 
void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  // On the fast path the value ends up in eax and control jumps to 'done';
  // failed extension checks jump to 'slow'.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    // let/const bindings require a hole check for use-before-initialization.
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        // Legacy const reads the hole as undefined instead of throwing.
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}
   1409 
   1410 
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Load the value of the variable referenced by 'proxy' into the current
  // expression context (typically eax), emitting hole checks where the
  // binding may be read before initialization.
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      // Use inline caching. Variable name is passed in ecx and the global
      // object in eax.
      __ mov(edx, GlobalObjectOperand());
      __ mov(ecx, var->name());
      CallLoadIC(CONTEXTUAL);
      context()->Plug(eax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(eax, var);
          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ push(Immediate(var->name()));
            __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
          } else {
            // Uninitalized const bindings outside of harmony mode are unholed.
            ASSERT(var->mode() == CONST_LEGACY);
            __ mov(eax, isolate()->factory()->undefined_value());
          }
          __ bind(&done);
          context()->Plug(eax);
          break;
        }
      }
      // No hole check needed: plug the variable's slot directly.
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}
   1511 
   1512 
// Emits code that produces a fresh clone of a RegExp literal in eax.
// The materialized JSRegExp is cached in the function's literals array;
// if the cached slot is still undefined, the runtime is called to
// materialize it first. The cached object is then shallow-copied so
// each evaluation of the literal yields a distinct object.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  // Offset of this literal's cache slot within the literals FixedArray.
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  // An undefined slot means the literal has not been materialized yet.
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
  __ mov(ebx, eax);

  __ bind(&materialized);
  // Total size of the clone: the JSRegExp header plus its in-object fields.
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  // Fast path: bump-pointer allocation in new space; falls through to the
  // runtime on failure.
  __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  // ebx (the boilerplate regexp) must survive the runtime call.
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  // Copy the trailing word when the object size is an odd number of words.
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
  context()->Plug(eax);
}
   1565 
   1566 
   1567 void FullCodeGenerator::EmitAccessor(Expression* expression) {
   1568   if (expression == NULL) {
   1569     __ push(Immediate(isolate()->factory()->null_value()));
   1570   } else {
   1571     VisitForStackValue(expression);
   1572   }
   1573 }
   1574 
   1575 
// Emits code for an object literal. The boilerplate object is created
// either via the runtime (deep/unusual literals) or via the fast clone
// stub, then each non-compile-time property is stored individually.
// The result object ends up in eax (or on top of the stack once any
// property stores have been emitted).
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  // constant_properties stores key/value pairs, hence the division by 2.
  int properties_count = constant_properties->length() / 2;
  // Slow path: literals that are too deep, may hold doubles, or exceed the
  // clone stub's limits go through the runtime.
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() ||
      flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
  } else {
    // Fast path: shallow clone via stub; arguments are passed in registers.
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    // Compile-time values are already part of the boilerplate.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        // Constant properties were filtered out above; reaching this at
        // runtime is a compiler bug.
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // Internalized-string keys can use the (faster) store IC.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(ecx, Immediate(key->value()));
            __ mov(edx, Operand(esp, 0));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            // Shadowed by a later property with the same key; evaluate for
            // side effects only.
            VisitForEffect(value);
          }
          break;
        }
        // Generic key: store through the runtime.
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ push(Immediate(Smi::FromInt(NONE)));  // PropertyAttributes
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        // Accessors are collected first so a getter/setter pair with the
        // same key can be defined with a single runtime call below.
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ push(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ push(Immediate(Smi::FromInt(NONE)));
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
   1700 
   1701 
// Emits code for an array literal. A boilerplate array is created via the
// runtime (deep or very large literals) or via the fast shallow-clone stub,
// then each non-compile-time element is evaluated and stored into the
// cloned array.
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  // constant_elements is a pair: [0] = ElementsKind (as a Smi),
  // [1] = the constant element values.
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  // Slow path: nested or over-long literals are created by the runtime.
  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
  } else {
    // Fast path: shallow clone via stub; arguments are passed in registers.
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      // Spill the array and its literal index; the element-store stub path
      // below expects both on the stack.
      __ push(eax);  // array literal.
      __ push(Immediate(Smi::FromInt(expr->literal_index())));
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
      // cannot transition and don't need to call the runtime stub.
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ mov(ebx, Operand(esp, kPointerSize));  // Copy of array literal.
      __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
      // Store the subexpression value in the array's elements.
      __ mov(FieldOperand(ebx, offset), result_register());
      // Update the write barrier for the array store.
      __ RecordWriteField(ebx, offset, result_register(), ecx,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          INLINE_SMI_CHECK);
    } else {
      // Store the subexpression value in the array's elements.
      __ mov(ecx, Immediate(Smi::FromInt(i)));
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ add(esp, Immediate(kPointerSize));  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
   1791 
   1792 
// Emits code for an assignment expression. Handles plain and compound
// assignments (e.g. x += y) to variables, named properties and keyed
// properties. For compound assignments the current value is loaded,
// combined with the RHS, and then stored back; bailout points are
// recorded after each load and after the binary operation.
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  ASSERT(expr->target()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ Assignment");

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in edx.
        VisitForStackValue(property->obj());
        __ mov(edx, Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        // Compound keyed assignment also needs object/key in edx/ecx for
        // the keyed load below.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(edx, Operand(esp, kPointerSize));  // Object.
        __ mov(ecx, Operand(esp, 0));             // Key.
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    // Plain assignment: only the RHS value is needed (in eax).
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
   1901 
   1902 
// Emits code for a yield expression inside a generator function. Handles
// plain yields (SUSPEND/INITIAL), the implicit final return (FINAL), and
// yield* delegation (DELEGATING), which drives the delegated iterator with
// an inline next/throw loop.
void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this.  It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::SUSPEND:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::INITIAL: {
      Label suspend, continuation, resume;

      __ jmp(&suspend);

      // "continuation" marks the code offset stored in the generator object;
      // resuming the generator jumps back here.
      __ bind(&continuation);
      __ jmp(&resume);

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      // Record where to resume, and the current context, in the generator.
      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(continuation.pos())));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      // If the operand stack is empty (esp at the expressions base), skip
      // the runtime call that saves it.
      __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
      __ cmp(esp, ebx);
      __ j(equal, &post_runtime);
      __ push(eax);  // generator object
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
      __ mov(context_register(),
             Operand(ebp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);
      __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::FINAL: {
      // Mark the generator closed, box the final value, and return.
      VisitForAccumulatorValue(expr->generator_object());
      __ mov(FieldOperand(result_register(),
                          JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::DELEGATING: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      // Initial send value is undefined.
      __ mov(eax, isolate()->factory()->undefined_value());
      __ jmp(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      // Register this catch block's position in the handler table.
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ mov(ecx, isolate()->factory()->throw_string());  // "throw"
      __ push(ecx);                                      // "throw"
      __ push(Operand(esp, 2 * kPointerSize));           // iter
      __ push(eax);                                      // exception
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(eax);                                       // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ push(eax);                                      // result
      __ jmp(&l_suspend);
      // Resumption of the delegating generator continues here.
      __ bind(&l_continuation);
      __ jmp(&l_resume);
      __ bind(&l_suspend);
      // Generator object sits below the result and the try handler.
      const int generator_object_depth = kPointerSize + handler_size;
      __ mov(eax, Operand(esp, generator_object_depth));
      __ push(eax);                                      // g
      ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(l_continuation.pos())));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
      __ mov(context_register(),
             Operand(ebp, StandardFrameConstants::kContextOffset));
      __ pop(eax);                                       // result
      EmitReturnSequence();
      __ bind(&l_resume);                                // received in eax
      __ PopTryHandler();

      // receiver = iter; f = iter.next; arg = received;
      __ bind(&l_next);
      __ mov(ecx, isolate()->factory()->next_string());  // "next"
      __ push(ecx);
      __ push(Operand(esp, 2 * kPointerSize));           // iter
      __ push(eax);                                      // received
      
      // result = receiver[f](arg);
      __ bind(&l_call);
      __ mov(edx, Operand(esp, kPointerSize));
      Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
      CallIC(ic, TypeFeedbackId::None());
      __ mov(edi, eax);
      __ mov(Operand(esp, 2 * kPointerSize), edi);
      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      // Restore the context after the call.
      __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ bind(&l_loop);
      __ push(eax);                                      // save result
      __ mov(edx, eax);                                  // result
      __ mov(ecx, isolate()->factory()->done_string());  // "done"
      CallLoadIC(NOT_CONTEXTUAL);                        // result.done in eax
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ test(eax, eax);
      __ j(zero, &l_try);

      // result.value
      __ pop(edx);                                        // result
      __ mov(ecx, isolate()->factory()->value_string());  // "value"
      CallLoadIC(NOT_CONTEXTUAL);                         // result.value in eax
      context()->DropAndPlug(2, eax);                     // drop iter and g
      break;
    }
  }
}
   2052 
   2053 
// Emits code that resumes a suspended generator. Checks the generator's
// state (closed or executing are handled specially), rebuilds a JavaScript
// frame as it was at suspension, and either jumps straight back into the
// generator code (fast path, NEXT with empty operand stack) or calls the
// runtime to restore the operand stack and handlers.
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in eax, and is ultimately read by the resumed generator, as
  // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // ebx will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(ebx);

  // Check generator state.
  Label wrong_state, closed_state, done;
  // The continuation encodes the state: negative = executing, 0 = closed,
  // positive = suspended at that code offset.
  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
  __ cmp(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
         Immediate(Smi::FromInt(0)));
  __ j(equal, &closed_state);
  __ j(less, &wrong_state);

  // Load suspended function and context.
  __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
  __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));

  // Push receiver.
  __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));

  // Push holes for arguments to generator function.
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(edx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(ecx, isolate()->factory()->the_hole_value());
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  // edx holds a Smi count; subtracting a Smi 1 decrements it, and the carry
  // flag signals we have gone below zero.
  __ sub(edx, Immediate(Smi::FromInt(1)));
  __ j(carry, &push_frame);
  __ push(ecx);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame;
  __ bind(&push_frame);
  // The call pushes a return address so the resumed frame can return to
  // the &done label below.
  __ call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.

  // Load the operand stack size.
  __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
  __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
  __ SmiUntag(edx);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ cmp(edx, Immediate(0));
    __ j(not_zero, &slow_resume);
    // Compute the absolute resume address: code entry + stored continuation
    // offset, then mark the generator as executing and jump in.
    __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
    __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(ecx);
    __ add(edx, ecx);
    __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
           Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ jmp(edx);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ sub(edx, Immediate(1));
  __ j(carry, &call_resume);
  __ push(ecx);
  __ jmp(&push_operand_holes);
  __ bind(&call_resume);
  __ push(ebx);
  __ push(result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ Abort(kGeneratorFailedToResume);

  // Reach here when generator is closed.
  __ bind(&closed_state);
  if (resume_mode == JSGeneratorObject::NEXT) {
    // Return completed iterator result when generator is closed.
    __ push(Immediate(isolate()->factory()->undefined_value()));
    // Pop value from top-of-stack slot; box result into result register.
    EmitCreateIteratorResult(true);
  } else {
    // Throw the provided value.
    __ push(eax);
    __ CallRuntime(Runtime::kHiddenThrow, 1);
  }
  __ jmp(&done);

  // Throw error if we attempt to operate on a running generator.
  __ bind(&wrong_state);
  __ push(ebx);
  __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);

  __ bind(&done);
  context()->Plug(result_register());
}
   2164 
   2165 
// Allocates and initializes an iterator result object {value, done} in eax.
// The value is popped from the top of the stack; `done` is the given
// compile-time boolean. Falls back to a runtime allocation on GC pressure.
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  Handle<Map> map(isolate()->native_context()->iterator_result_map());

  __ Allocate(map->instance_size(), eax, ecx, edx, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&gc_required);
  __ Push(Smi::FromInt(map->instance_size()));
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  // The runtime call may have changed the context; restore it.
  __ mov(context_register(),
         Operand(ebp, StandardFrameConstants::kContextOffset));

  __ bind(&allocated);
  __ mov(ebx, map);
  __ pop(ecx);  // The yielded value (pushed by the caller).
  __ mov(edx, isolate()->factory()->ToBoolean(done));
  // The field stores below assume the exact object layout; guard it.
  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset), ecx);
  __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset), edx);

  // Only the value field needs a write barrier, as the other values are in the
  // root set.
  __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset,
                      ecx, edx, kDontSaveFPRegs);
}
   2199 
   2200 
   2201 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   2202   SetSourcePosition(prop->position());
   2203   Literal* key = prop->key()->AsLiteral();
   2204   ASSERT(!key->value()->IsSmi());
   2205   __ mov(ecx, Immediate(key->value()));
   2206   CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
   2207 }
   2208 
   2209 
   2210 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   2211   SetSourcePosition(prop->position());
   2212   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
   2213   CallIC(ic, prop->PropertyFeedbackId());
   2214 }
   2215 
   2216 
// Emits a binary operation with an inlined fast path for the case where
// both operands are smis.  On entry the left operand is on top of the
// stack and the right operand is in eax; the result is left in eax.
// Register usage on the fast path: edx = left (tagged), ecx = right
// (tagged), eax = working copy.  If the combined smi check or an
// overflow check fails, control falls back to the BinaryOpIC stub via
// &stub_call, which expects left in edx and right in eax.
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
  Label smi_case, done, stub_call;
  __ pop(edx);        // Left operand.
  __ mov(ecx, eax);   // Save right operand; eax is clobbered next.
  __ or_(eax, edx);   // eax = left | right: low bit set iff either is non-smi.
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  __ mov(eax, ecx);   // Restore right operand for the stub.
  BinaryOpICStub stub(isolate(), op, mode);
  CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  // Smi case.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      // Shift the still-tagged left value; the tag bit may be shifted
      // into from above, so clear it afterwards to re-establish a smi.
      __ SmiUntag(ecx);  // Shift count.
      __ sar_cl(eax);  // No checks of result necessary
      __ and_(eax, Immediate(~kSmiTagMask));
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
      // eax - 0xc0000000 is non-negative exactly when eax is in the
      // 31-bit smi range.
      __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
      // Doesn't fit: re-tag the right operand and take the stub path.
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      // An unsigned result with either of the top two bits set cannot be
      // represented as a (signed, tagged) smi.
      __ test(eax, Immediate(0xc0000000));
      __ j(zero, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      __ add(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      // Untagging one operand makes tagged * untagged yield a tagged
      // product directly.
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      __ test(eax, eax);
      __ j(not_zero, &done, Label::kNear);
      // Result is zero: if either operand was negative the true result
      // is -0, which is not a smi, so fall back to the stub.
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      // Bitwise ops on two smis always produce a smi; no checks needed.
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}
   2310 
   2311 
// Emits a binary operation entirely through the BinaryOpIC stub (no
// inlined smi fast path).  Left operand is popped from the stack into
// edx, right operand is already in eax; the result is left in eax.
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ pop(edx);  // Left operand.
  BinaryOpICStub stub(isolate(), op, mode);
  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
  CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(eax);
}
   2322 
   2323 
// Emits a plain (Token::ASSIGN) store of the value in eax into the
// reference denoted by expr, then plugs eax into the current context.
// The value in eax is preserved across the store.
void FullCodeGenerator::EmitAssignment(Expression* expr) {
  ASSERT(expr->IsValidReferenceExpression());

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      // An effect context so the variable load below doesn't disturb eax.
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(edx, eax);  // Receiver for the store IC.
      __ pop(eax);  // Restore value.
      __ mov(ecx, prop->key()->AsLiteral()->value());  // Property name.
      CallStoreIC();
      break;
    }
    case KEYED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(ecx, eax);  // Key.
      __ pop(edx);  // Receiver.
      __ pop(eax);  // Restore value.
      // Select sloppy or strict keyed store stub per the current mode.
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(eax);
}
   2370 
   2371 
// Stores the value in eax into the given stack-local or context slot,
// emitting a write barrier for context slots.
// NOTE(review): the write barrier names ecx as the object register, so
// this assumes `location` was computed via VarOperand(var, ecx) — as all
// callers in this file do — leaving the context object in ecx.
// Clobbers edx and ebx on the context-slot path; eax is preserved.
void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ mov(location, eax);
  if (var->IsContextSlot()) {
    __ mov(edx, eax);  // RecordWrite clobbers its value register; keep eax.
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
  }
}
   2381 
   2382 
// Emits a runtime call that stores the value in eax into a dynamically
// looked-up context slot (used for lookup-slot variables).  Arguments to
// Runtime::kHiddenStoreContextSlot: value, context, name, strict mode.
void FullCodeGenerator::EmitCallStoreContextSlot(
    Handle<String> name, StrictMode strict_mode) {
  __ push(eax);  // Value.
  __ push(esi);  // Context.
  __ push(Immediate(name));
  __ push(Immediate(Smi::FromInt(strict_mode)));
  __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
}
   2391 
   2392 
// Emits a store of the value in eax into variable `var`, dispatching on
// the variable's allocation and mode:
//  - unallocated (global): store IC against the global object;
//  - legacy const initializer: store only if the slot still holds the
//    hole (i.e. first initialization wins, later writes are ignored);
//  - `let` re-assignment: throw a reference error if the slot holds the
//    hole (read before initialization), otherwise store;
//  - everything else that is legal: plain store (with barrier via
//    EmitStoreToStackLocalOrContextSlot).
// Non-initializing assignments to consts fall through and are ignored.
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(ecx, var->name());            // Name for the store IC.
    __ mov(edx, GlobalObjectOperand());  // Receiver is the global object.
    CallStoreIC();

  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    ASSERT(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ push(eax);
      __ push(esi);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
    } else {
      ASSERT(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, ecx);
      __ mov(edx, location);
      // Only store if the slot is still uninitialized (holds the hole).
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &skip, Label::kNear);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      EmitCallStoreContextSlot(var->name(), strict_mode());
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label assign;
      MemOperand location = VarOperand(var, ecx);
      __ mov(edx, location);
      // Assigning before initialization (slot holds the hole) is an error.
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &assign, Label::kNear);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      __ bind(&assign);
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    // Assignment to var or initializing assignment to let/const
    // in harmony mode.
    if (var->IsLookupSlot()) {
      EmitCallStoreContextSlot(var->name(), strict_mode());
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, ecx);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ mov(edx, location);
        __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  }
  // Non-initializing assignments to consts are ignored.
}
   2456 
   2457 
// Completes a named-property assignment via the store IC.
// Expected machine state on entry (set up by the assignment visitor):
//   eax    : value
//   esp[0] : receiver
// Leaves the value in eax.
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  // eax    : value
  // esp[0] : receiver

  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  ASSERT(prop->key()->IsLiteral());

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(ecx, prop->key()->AsLiteral()->value());  // Property name.
  __ pop(edx);  // Receiver.
  CallStoreIC(expr->AssignmentFeedbackId());
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}
   2475 
   2476 
// Completes a keyed-property assignment via the keyed store IC.
// Expected machine state on entry:
//   eax               : value
//   esp[0]            : key
//   esp[kPointerSize] : receiver
// Leaves the value in eax.
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // eax               : value
  // esp[0]            : key
  // esp[kPointerSize] : receiver

  __ pop(ecx);  // Key.
  __ pop(edx);  // Receiver.
  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  // Select sloppy or strict keyed store stub per the current mode.
  Handle<Code> ic = strict_mode() == SLOPPY
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}
   2495 
   2496 
// Visits a property-load expression, dispatching to the named or keyed
// load IC helpers.  Sets up the receiver in edx (and, for keyed loads,
// the key in ecx) as those helpers expect; the result lands in eax.
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    __ mov(edx, result_register());  // Receiver.
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(eax);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ pop(edx);                     // Object.
    __ mov(ecx, result_register());  // Key.
    EmitKeyedPropertyLoad(expr);
    context()->Plug(eax);
  }
}
   2516 
   2517 
// Emits a call to an IC stub, tagging the call site with the AST id for
// type-feedback purposes, and bumps the per-function IC counter.
void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ call(code, RelocInfo::CODE_TARGET, ast_id);
}
   2523 
   2524 
   2525 // Code common for calls using the IC.
// Code common for calls using the IC.
// Sets up the stack for a (possibly named-property) call: pushes the
// target function and its receiver, then delegates to EmitCall.  For a
// plain variable callee the receiver is undefined; for a method call the
// callee is loaded off the receiver (already on the stack) via the named
// load IC and slotted in under it.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  CallIC::CallType call_type = callee->IsVariableProxy()
      ? CallIC::FUNCTION
      : CallIC::METHOD;
  // Get the target function.
  if (call_type == CallIC::FUNCTION) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ push(Immediate(isolate()->factory()->undefined_value()));
  } else {
    // Load the function from the receiver.
    ASSERT(callee->IsProperty());
    __ mov(edx, Operand(esp, 0));  // Receiver for the load IC.
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ push(Operand(esp, 0));                 // Duplicate the receiver...
    __ mov(Operand(esp, kPointerSize), eax);  // ...and overwrite the old slot
                                              // with the loaded function.
  }

  EmitCall(expr, call_type);
}
   2554 
   2555 
   2556 // Code common for calls using the IC.
// Code common for calls using the IC.
// Like EmitCallWithLoadIC, but the callee is a keyed property: evaluates
// the key, loads the function off the receiver with the keyed load IC,
// pushes it under the receiver, and delegates to EmitCall as a METHOD
// call.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  ASSERT(callee->IsProperty());
  __ mov(edx, Operand(esp, 0));  // Receiver for the keyed load IC.
  // Move the key into the right register for the keyed load IC.
  __ mov(ecx, eax);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ push(Operand(esp, 0));                 // Duplicate the receiver...
  __ mov(Operand(esp, kPointerSize), eax);  // ...and store the function below.

  EmitCall(expr, CallIC::METHOD);
}
   2578 
   2579 
// Evaluates the call arguments onto the stack and performs the call via
// the CallIC stub.  On entry the stack holds [function, receiver]; the
// stub receives the target function in edi and the (smi-tagged) feedback
// slot in edx.  The result is dropped-and-plugged from eax.
void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position of the IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic = CallIC::initialize_stub(
      isolate(), arg_count, call_type);
  __ Move(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));
  // Target function sits below the receiver and the arguments.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  context()->DropAndPlug(1, eax);  // Drop the receiver, keep the result.
}
   2607 
   2608 
// Emits the runtime call that resolves a possibly-direct eval.  The
// caller has already pushed a copy of the function (the 1st of the 5
// runtime arguments); this pushes the remaining four: the first call
// argument (or undefined), the enclosing receiver, the language mode,
// and the scope's start position.
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the receiver of the enclosing function.
  __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
  // Push the language mode.
  __ push(Immediate(Smi::FromInt(strict_mode())));

  // Push the start position of the scope the calls resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
}
   2628 
   2629 
// Visits a call expression, dispatching on the kind of callee:
// possibly-direct eval, global variable, dynamically-scoped lookup slot,
// property (named or keyed), or an arbitrary expression.  All paths end
// in a call whose return site is recorded via RecordJSReturnSite (either
// directly or inside EmitCall*), which the DEBUG flag below verifies.
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function.  Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
    // to resolve the function we need to call and the receiver of the call.
    // Then we call the resolved function using the given arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      // Reserved receiver slot.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ push(Operand(esp, (arg_count + 1) * kPointerSize));
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in eax (function) and
      // edx (receiver). Touch up the stack with the right values.
      __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
      __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
    }
    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, eax);

  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithLoadIC(expr);

  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;
    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
    }
    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
    __ push(context_register());
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
    __ push(eax);  // Function.
    __ push(edx);  // Receiver.

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing the hole to the call function stub.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found by
    // LoadContextSlot.
    EmitCall(expr);

  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithLoadIC(expr);
    } else {
      EmitKeyedCallWithLoadIC(expr, property->key());
    }

  } else {
    ASSERT(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    // Receiver is undefined for a non-property call.
    __ push(Immediate(isolate()->factory()->undefined_value()));
    // Emit function call.
    EmitCall(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}
   2742 
   2743 
// Visits a `new` expression: evaluates the constructor and arguments
// onto the stack and calls the construct stub.  The stub receives the
// argument count in eax, the constructor in edi, the feedback vector in
// ebx, and the (smi-tagged) feedback slot in edx; the new object comes
// back in eax.
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    // The allocation-site slot must immediately follow the call-new slot.
    ASSERT(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);
  }

  __ LoadHeapObject(ebx, FeedbackVector());
  __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
  __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(eax);
}
   2785 
   2786 
// Inlined %_IsSmi: tests whether the single argument (evaluated into
// eax) is a smi by checking its tag bit, and plugs the boolean outcome
// into the test context.
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ test(eax, Immediate(kSmiTagMask));
  Split(zero, if_true, if_false, fall_through);  // Tag bit clear => smi.

  context()->Plug(if_true, if_false);
}
   2806 
   2807 
// Inlined %_IsNonNegativeSmi: true iff the argument (in eax) is a smi
// with the sign bit clear.  A single test against the tag bit OR'ed with
// the sign bit checks both conditions at once.
void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ test(eax, Immediate(kSmiTagMask | 0x80000000));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   2827 
   2828 
// Inlined %_IsObject: true iff the argument (in eax) is null, or a
// non-undetectable heap object whose instance type lies in the
// non-callable spec-object range.  Smis and undetectable objects
// (which masquerade as undefined) are false.
void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, if_true);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, if_false);
  // Range check on the instance type: [FIRST, LAST]_NONCALLABLE_SPEC_OBJECT.
  __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ j(below, if_false);
  __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(below_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   2859 
   2860 
// Inlined %_IsSpecObject: true iff the argument (in eax) is a heap
// object whose instance type is at least FIRST_SPEC_OBJECT_TYPE.
void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);  // ebx gets the map.
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   2881 
   2882 
// Inlined %_IsUndetectableObject: true iff the argument (in eax) is a
// heap object whose map has the kIsUndetectable bit set.
void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
  __ test(ebx, Immediate(1 << Map::kIsUndetectable));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(not_zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   2905 
   2906 
// Inlined %_IsStringWrapperSafeForDefaultValueOf: determines whether a
// String wrapper object can use the default valueOf behavior.  An object
// qualifies when (a) it has no own "valueOf" property — checked by
// scanning the map's descriptor array, with the outcome cached in a map
// bit so the scan runs at most once per map — and (b) its prototype is
// the unmodified String prototype of the current native context.
// NOTE(review): as the in-line comment says, the descriptor scan omits
// an enumeration-index check, so a "valueOf" inherited into a shared
// descriptor array via a transition yields a false positive (answer
// leans conservative: "not safe").
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(eax);

  // Check whether this map has already been checked to be safe for default
  // valueOf.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
            1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ j(not_zero, &skip_lookup);

  // Check for fast case object. Return false for slow case objects.
  __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ cmp(ecx, isolate()->factory()->hash_table_map());
  __ j(equal, if_false);

  // Look for valueOf string in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(ecx, ebx);
  __ cmp(ecx, 0);
  __ j(equal, &done);

  __ LoadInstanceDescriptors(ebx, ebx);
  // ebx: descriptor array.
  // ecx: valid entries in the descriptor array.
  // Calculate the end of the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kPointerSize == 4);
  __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
  __ lea(ecx, Operand(ebx, ecx, times_4, DescriptorArray::kFirstOffset));
  // Calculate location of the first key name.
  __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
  // Loop through all the keys in the descriptor array. If one of these is the
  // internalized string "valueOf" the result is false.
  __ jmp(&entry);
  __ bind(&loop);
  __ mov(edx, FieldOperand(ebx, 0));  // Current descriptor's key.
  __ cmp(edx, isolate()->factory()->value_of_string());
  __ j(equal, if_false);
  __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ cmp(ebx, ecx);  // Reached the computed end of the descriptor array?
  __ j(not_equal, &loop);

  __ bind(&done);

  // Reload map as register ebx was used as temporary above.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ or_(FieldOperand(ebx, Map::kBitField2Offset),
         Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object check that its
  // prototype is the un-modified String prototype. If not result is false.
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ JumpIfSmi(ecx, if_false);
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edx,
         FieldOperand(edx, GlobalObject::kNativeContextOffset));
  __ cmp(ecx,
         ContextOperand(edx,
                        Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   2996 
   2997 
// Inlined %_IsFunction: true iff the argument (in eax) is a heap object
// of instance type JS_FUNCTION_TYPE.
void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);  // ebx gets the map.
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   3018 
   3019 
   3020 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
          // Inline code for %_IsMinusZero: true iff the argument is the heap
          // number -0.  IEEE-754 -0 has the bit pattern 0x80000000 00000000,
          // so the test is: heap-number map, upper (exponent) word equal to
          // 0x80000000, and lower (mantissa) word equal to 0.
   3021   ZoneList<Expression*>* args = expr->arguments();
   3022   ASSERT(args->length() == 1);
   3023 
   3024   VisitForAccumulatorValue(args->at(0));
   3025 
   3026   Label materialize_true, materialize_false;
   3027   Label* if_true = NULL;
   3028   Label* if_false = NULL;
   3029   Label* fall_through = NULL;
   3030   context()->PrepareTest(&materialize_true, &materialize_false,
   3031                          &if_true, &if_false, &fall_through);
   3032 
   3033   Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
          // Smis and non-heap-numbers cannot be -0.
   3034   __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
   3035   // Check if the exponent half is 0x80000000. Comparing against 1 and
   3036   // checking for overflow is the shortest possible encoding.
          // (cmp computes word - 1; signed overflow occurs only when the word
          // is INT_MIN, i.e. exactly 0x80000000.)
   3037   __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
   3038   __ j(no_overflow, if_false);
   3039   __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
   3040   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3041   Split(equal, if_true, if_false, fall_through);
   3042 
   3043   context()->Plug(if_true, if_false);
   3044 }
   3045 
   3046 
   3047 
   3048 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
          // Inline code for %_IsArray: true iff the argument is a JSArray
          // (instance type JS_ARRAY_TYPE).  Smis fail immediately.
   3049   ZoneList<Expression*>* args = expr->arguments();
   3050   ASSERT(args->length() == 1);
   3051 
   3052   VisitForAccumulatorValue(args->at(0));
   3053 
   3054   Label materialize_true, materialize_false;
   3055   Label* if_true = NULL;
   3056   Label* if_false = NULL;
   3057   Label* fall_through = NULL;
   3058   context()->PrepareTest(&materialize_true, &materialize_false,
   3059                          &if_true, &if_false, &fall_through);
   3060 
   3061   __ JumpIfSmi(eax, if_false);
          // ebx receives the map as a scratch register.
   3062   __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
   3063   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3064   Split(equal, if_true, if_false, fall_through);
   3065 
   3066   context()->Plug(if_true, if_false);
   3067 }
   3068 
   3069 
   3070 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
          // Inline code for %_IsRegExp: true iff the argument is a JSRegExp
          // (instance type JS_REGEXP_TYPE).  Smis fail immediately.
   3071   ZoneList<Expression*>* args = expr->arguments();
   3072   ASSERT(args->length() == 1);
   3073 
   3074   VisitForAccumulatorValue(args->at(0));
   3075 
   3076   Label materialize_true, materialize_false;
   3077   Label* if_true = NULL;
   3078   Label* if_false = NULL;
   3079   Label* fall_through = NULL;
   3080   context()->PrepareTest(&materialize_true, &materialize_false,
   3081                          &if_true, &if_false, &fall_through);
   3082 
   3083   __ JumpIfSmi(eax, if_false);
          // ebx receives the map as a scratch register.
   3084   __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
   3085   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3086   Split(equal, if_true, if_false, fall_through);
   3087 
   3088   context()->Plug(if_true, if_false);
   3089 }
   3090 
   3091 
   3092 
   3093 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
          // Inline code for %_IsConstructCall: true iff the current function
          // was invoked as a constructor.  Determined by inspecting the
          // caller's frame marker on the stack (skipping an arguments adaptor
          // frame if one was inserted between caller and callee).
   3094   ASSERT(expr->arguments()->length() == 0);
   3095 
   3096   Label materialize_true, materialize_false;
   3097   Label* if_true = NULL;
   3098   Label* if_false = NULL;
   3099   Label* fall_through = NULL;
   3100   context()->PrepareTest(&materialize_true, &materialize_false,
   3101                          &if_true, &if_false, &fall_through);
   3102 
   3103   // Get the frame pointer for the calling frame.
   3104   __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
   3105 
   3106   // Skip the arguments adaptor frame if it exists.
   3107   Label check_frame_marker;
          // Adaptor frames store the ARGUMENTS_ADAPTOR sentinel in the slot
          // where ordinary frames keep the context.
   3108   __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
   3109          Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   3110   __ j(not_equal, &check_frame_marker);
   3111   __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
   3112 
   3113   // Check the marker in the calling frame.
   3114   __ bind(&check_frame_marker);
   3115   __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
   3116          Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
   3117   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3118   Split(equal, if_true, if_false, fall_through);
   3119 
   3120   context()->Plug(if_true, if_false);
   3121 }
   3122 
   3123 
   3124 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
          // Inline code for %_ObjectEquals: raw identity comparison of two
          // values (pointer/smi equality, no coercions).
   3125   ZoneList<Expression*>* args = expr->arguments();
   3126   ASSERT(args->length() == 2);
   3127 
   3128   // Load the two objects into registers and perform the comparison.
   3129   VisitForStackValue(args->at(0));
   3130   VisitForAccumulatorValue(args->at(1));
   3131 
   3132   Label materialize_true, materialize_false;
   3133   Label* if_true = NULL;
   3134   Label* if_false = NULL;
   3135   Label* fall_through = NULL;
   3136   context()->PrepareTest(&materialize_true, &materialize_false,
   3137                          &if_true, &if_false, &fall_through);
   3138 
          // First argument comes off the stack into ebx; second is in eax.
   3139   __ pop(ebx);
   3140   __ cmp(eax, ebx);
   3141   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3142   Split(equal, if_true, if_false, fall_through);
   3143 
   3144   context()->Plug(if_true, if_false);
   3145 }
   3146 
   3147 
   3148 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
          // Inline code for %_Arguments(index): reads one element of the
          // current function's arguments via ArgumentsAccessStub.  The result
          // is left in eax.
   3149   ZoneList<Expression*>* args = expr->arguments();
   3150   ASSERT(args->length() == 1);
   3151 
   3152   // ArgumentsAccessStub expects the key in edx and the formal
   3153   // parameter count in eax.
   3154   VisitForAccumulatorValue(args->at(0));
   3155   __ mov(edx, eax);
   3156   __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
   3157   ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
   3158   __ CallStub(&stub);
   3159   context()->Plug(eax);
   3160 }
   3161 
   3162 
   3163 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
          // Inline code for %_ArgumentsLength: produces the actual argument
          // count as a smi in eax.  Defaults to the formal parameter count;
          // if the caller went through an arguments adaptor frame, the real
          // count is read from that frame instead.
   3164   ASSERT(expr->arguments()->length() == 0);
   3165 
   3166   Label exit;
   3167   // Get the number of formal parameters.
   3168   __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
   3169 
   3170   // Check if the calling frame is an arguments adaptor frame.
   3171   __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
   3172   __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
   3173          Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   3174   __ j(not_equal, &exit);
   3175 
   3176   // Arguments adaptor case: Read the arguments length from the
   3177   // adaptor frame.
   3178   __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
   3179 
   3180   __ bind(&exit);
   3181   __ AssertSmi(eax);
   3182   context()->Plug(eax);
   3183 }
   3184 
   3185 
   3186 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
          // Inline code for %_ClassOf: computes the class-name string of the
          // argument.  Smis and non-spec-objects yield null; callable spec
          // objects yield "Function"; otherwise the instance class name is
          // taken from the constructor's SharedFunctionInfo, falling back to
          // "Object" when the map's constructor is not a JSFunction.
   3187   ZoneList<Expression*>* args = expr->arguments();
   3188   ASSERT(args->length() == 1);
   3189   Label done, null, function, non_function_constructor;
   3190 
   3191   VisitForAccumulatorValue(args->at(0));
   3192 
   3193   // If the object is a smi, we return null.
   3194   __ JumpIfSmi(eax, &null);
   3195 
   3196   // Check that the object is a JS object but take special care of JS
   3197   // functions to make sure they have 'Function' as their class.
   3198   // Assume that there are only two callable types, and one of them is at
   3199   // either end of the type range for JS object types. Saves extra comparisons.
   3200   STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
   3201   __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
   3202   // Map is now in eax.
   3203   __ j(below, &null);
   3204   STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
   3205                 FIRST_SPEC_OBJECT_TYPE + 1);
          // Equal to FIRST_SPEC_OBJECT_TYPE means the first callable type.
   3206   __ j(equal, &function);
   3207 
   3208   __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
   3209   STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
   3210                 LAST_SPEC_OBJECT_TYPE - 1);
          // Equal to LAST_SPEC_OBJECT_TYPE means the second callable type.
   3211   __ j(equal, &function);
   3212   // Assume that there is no larger type.
   3213   STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
   3214 
   3215   // Check if the constructor in the map is a JS function.
   3216   __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
   3217   __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
   3218   __ j(not_equal, &non_function_constructor);
   3219 
   3220   // eax now contains the constructor function. Grab the
   3221   // instance class name from there.
   3222   __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
   3223   __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
   3224   __ jmp(&done);
   3225 
   3226   // Functions have class 'Function'.
   3227   __ bind(&function);
   3228   __ mov(eax, isolate()->factory()->function_class_string());
   3229   __ jmp(&done);
   3230 
   3231   // Objects with a non-function constructor have class 'Object'.
   3232   __ bind(&non_function_constructor);
   3233   __ mov(eax, isolate()->factory()->Object_string());
   3234   __ jmp(&done);
   3235 
   3236   // Non-JS objects have class null.
   3237   __ bind(&null);
   3238   __ mov(eax, isolate()->factory()->null_value());
   3239 
   3240   // All done.
   3241   __ bind(&done);
   3242 
   3243   context()->Plug(eax);
   3244 }
   3245 
   3246 
   3247 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
          // Inline code for %_SubString(string, from, to): pushes the three
          // arguments and delegates to SubStringStub; result in eax.
   3248   // Load the arguments on the stack and call the stub.
   3249   SubStringStub stub(isolate());
   3250   ZoneList<Expression*>* args = expr->arguments();
   3251   ASSERT(args->length() == 3);
   3252   VisitForStackValue(args->at(0));
   3253   VisitForStackValue(args->at(1));
   3254   VisitForStackValue(args->at(2));
   3255   __ CallStub(&stub);
   3256   context()->Plug(eax);
   3257 }
   3258 
   3259 
   3260 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
          // Inline code for %_RegExpExec: pushes the four arguments and
          // delegates to RegExpExecStub; result in eax.
   3261   // Load the arguments on the stack and call the stub.
   3262   RegExpExecStub stub(isolate());
   3263   ZoneList<Expression*>* args = expr->arguments();
   3264   ASSERT(args->length() == 4);
   3265   VisitForStackValue(args->at(0));
   3266   VisitForStackValue(args->at(1));
   3267   VisitForStackValue(args->at(2));
   3268   VisitForStackValue(args->at(3));
   3269   __ CallStub(&stub);
   3270   context()->Plug(eax);
   3271 }
   3272 
   3273 
   3274 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
          // Inline code for %_ValueOf: unwraps a JSValue wrapper object to
          // its primitive value.  Smis and non-JSValue objects are returned
          // unchanged.
   3275   ZoneList<Expression*>* args = expr->arguments();
   3276   ASSERT(args->length() == 1);
   3277 
   3278   VisitForAccumulatorValue(args->at(0));  // Load the object.
   3279 
   3280   Label done;
   3281   // If the object is a smi return the object.
   3282   __ JumpIfSmi(eax, &done, Label::kNear);
   3283   // If the object is not a value type, return the object.
   3284   __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
   3285   __ j(not_equal, &done, Label::kNear);
          // Replace the wrapper with its wrapped value.
   3286   __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
   3287 
   3288   __ bind(&done);
   3289   context()->Plug(eax);
   3290 }
   3291 
   3292 
   3293 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
          // Inline code for %_DateField(date, index): reads a field of a
          // JSDate.  Index 0 (the time value) is always read directly; other
          // cached fields are read inline when the date cache stamp is still
          // current, otherwise a C function recomputes them.  Throws if the
          // receiver is not a JSDate.  The field index must be a literal smi.
   3294   ZoneList<Expression*>* args = expr->arguments();
   3295   ASSERT(args->length() == 2);
   3296   ASSERT_NE(NULL, args->at(1)->AsLiteral());
   3297   Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
   3298 
   3299   VisitForAccumulatorValue(args->at(0));  // Load the object.
   3300 
   3301   Label runtime, done, not_date_object;
   3302   Register object = eax;
   3303   Register result = eax;
   3304   Register scratch = ecx;
   3305 
   3306   __ JumpIfSmi(object, &not_date_object);
   3307   __ CmpObjectType(object, JS_DATE_TYPE, scratch);
   3308   __ j(not_equal, &not_date_object);
   3309 
   3310   if (index->value() == 0) {
            // Field 0 is the date's time value, stored directly in the object.
   3311     __ mov(result, FieldOperand(object, JSDate::kValueOffset));
   3312     __ jmp(&done);
   3313   } else {
   3314     if (index->value() < JSDate::kFirstUncachedField) {
              // Cached field: valid only while the isolate-wide date cache
              // stamp matches the stamp stored in this JSDate.
   3315       ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
   3316       __ mov(scratch, Operand::StaticVariable(stamp));
   3317       __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
   3318       __ j(not_equal, &runtime, Label::kNear);
   3319       __ mov(result, FieldOperand(object, JSDate::kValueOffset +
   3320                                           kPointerSize * index->value()));
   3321       __ jmp(&done);
   3322     }
            // Slow path: recompute the field via the C date-field function.
   3323     __ bind(&runtime);
   3324     __ PrepareCallCFunction(2, scratch);
   3325     __ mov(Operand(esp, 0), object);
   3326     __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
   3327     __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
   3328     __ jmp(&done);
   3329   }
   3330 
   3331   __ bind(&not_date_object);
   3332   __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
   3333   __ bind(&done);
   3334   context()->Plug(result);
   3335 }
   3336 
   3337 
   3338 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
          // Inline code for %_OneByteSeqStringSetChar(string, index, value):
          // writes one byte into a sequential one-byte string in place and
          // returns the string.  index and value must be smis; bounds and
          // string-type checks are only performed in debug builds.
   3339   ZoneList<Expression*>* args = expr->arguments();
   3340   ASSERT_EQ(3, args->length());
   3341 
   3342   Register string = eax;
   3343   Register index = ebx;
   3344   Register value = ecx;
   3345 
   3346   VisitForStackValue(args->at(1));  // index
   3347   VisitForStackValue(args->at(2));  // value
   3348   VisitForAccumulatorValue(args->at(0));  // string
   3349 
   3350   __ pop(value);
   3351   __ pop(index);
   3352 
   3353   if (FLAG_debug_code) {
   3354     __ test(value, Immediate(kSmiTagMask));
   3355     __ Check(zero, kNonSmiValue);
   3356     __ test(index, Immediate(kSmiTagMask));
   3357     __ Check(zero, kNonSmiValue);
   3358   }
   3359 
   3360   __ SmiUntag(value);
   3361   __ SmiUntag(index);
   3362 
   3363   if (FLAG_debug_code) {
   3364     static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
   3365     __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
   3366   }
   3367 
          // One byte per character, hence times_1 scaling of the index.
   3368   __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
   3369            value);
   3370   context()->Plug(string);
   3371 }
   3372 
   3373 
   3374 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
          // Inline code for %_TwoByteSeqStringSetChar(string, index, value):
          // writes one 16-bit character into a sequential two-byte string in
          // place and returns the string.  Checks run only in debug builds.
   3375   ZoneList<Expression*>* args = expr->arguments();
   3376   ASSERT_EQ(3, args->length());
   3377 
   3378   Register string = eax;
   3379   Register index = ebx;
   3380   Register value = ecx;
   3381 
   3382   VisitForStackValue(args->at(1));  // index
   3383   VisitForStackValue(args->at(2));  // value
   3384   VisitForAccumulatorValue(args->at(0));  // string
   3385   __ pop(value);
   3386   __ pop(index);
   3387 
   3388   if (FLAG_debug_code) {
   3389     __ test(value, Immediate(kSmiTagMask));
   3390     __ Check(zero, kNonSmiValue);
   3391     __ test(index, Immediate(kSmiTagMask));
   3392     __ Check(zero, kNonSmiValue);
            // The check helper expects an untagged index; retag afterwards so
            // the non-debug path below sees the same register contents.
   3393     __ SmiUntag(index);
   3394     static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
   3395     __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
   3396     __ SmiTag(index);
   3397   }
   3398 
   3399   __ SmiUntag(value);
   3400   // No need to untag a smi for two-byte addressing.
          // (A smi is the value shifted left by one, which equals index * 2 —
          // exactly the byte offset needed for two-byte characters.)
   3401   __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
   3402            value);
   3403   context()->Plug(string);
   3404 }
   3405 
   3406 
   3407 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
          // Inline code for %_MathPow(base, exponent): delegates to
          // MathPowStub with both arguments on the stack; result in eax.
   3408   // Load the arguments on the stack and call the runtime function.
   3409   ZoneList<Expression*>* args = expr->arguments();
   3410   ASSERT(args->length() == 2);
   3411   VisitForStackValue(args->at(0));
   3412   VisitForStackValue(args->at(1));
   3413 
   3414   MathPowStub stub(isolate(), MathPowStub::ON_STACK);
   3415   __ CallStub(&stub);
   3416   context()->Plug(eax);
   3417 }
   3418 
   3419 
   3420 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
          // Inline code for %_SetValueOf(object, value): stores value into a
          // JSValue wrapper and returns the value.  If the object is a smi or
          // not a JSValue, the store is skipped and the value is returned
          // unchanged.  The store needs a write barrier for the GC.
   3421   ZoneList<Expression*>* args = expr->arguments();
   3422   ASSERT(args->length() == 2);
   3423 
   3424   VisitForStackValue(args->at(0));  // Load the object.
   3425   VisitForAccumulatorValue(args->at(1));  // Load the value.
   3426   __ pop(ebx);  // eax = value. ebx = object.
   3427 
   3428   Label done;
   3429   // If the object is a smi, return the value.
   3430   __ JumpIfSmi(ebx, &done, Label::kNear);
   3431 
   3432   // If the object is not a value type, return the value.
   3433   __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
   3434   __ j(not_equal, &done, Label::kNear);
   3435 
   3436   // Store the value.
   3437   __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
   3438 
   3439   // Update the write barrier.  Save the value as it will be
   3440   // overwritten by the write barrier code and is needed afterward.
   3441   __ mov(edx, eax);
   3442   __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
   3443 
   3444   __ bind(&done);
   3445   context()->Plug(eax);
   3446 }
   3447 
   3448 
   3449 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
          // Inline code for %_NumberToString: delegates to NumberToStringStub
          // with the argument in eax; result string in eax.
   3450   ZoneList<Expression*>* args = expr->arguments();
   3451   ASSERT_EQ(args->length(), 1);
   3452 
   3453   // Load the argument into eax and call the stub.
   3454   VisitForAccumulatorValue(args->at(0));
   3455 
   3456   NumberToStringStub stub(isolate());
   3457   __ CallStub(&stub);
   3458   context()->Plug(eax);
   3459 }
   3460 
   3461 
   3462 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
          // Inline code for %_StringCharFromCode(code): builds the one-char
          // string for a character code.  The fast path is emitted inline;
          // the slow path generated by the helper falls back to the runtime.
          // Note the result register is ebx (chosen in the generator ctor).
   3463   ZoneList<Expression*>* args = expr->arguments();
   3464   ASSERT(args->length() == 1);
   3465 
   3466   VisitForAccumulatorValue(args->at(0));
   3467 
   3468   Label done;
   3469   StringCharFromCodeGenerator generator(eax, ebx);
   3470   generator.GenerateFast(masm_);
   3471   __ jmp(&done);
   3472 
   3473   NopRuntimeCallHelper call_helper;
   3474   generator.GenerateSlow(masm_, call_helper);
   3475 
   3476   __ bind(&done);
   3477   context()->Plug(ebx);
   3478 }
   3479 
   3480 
   3481 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
          // Inline code for %_StringCharCodeAt(string, index): returns the
          // character code at the given index as a smi, NaN when the index is
          // out of range, and undefined (to trigger conversion by the caller)
          // when the index is not a valid number.
   3482   ZoneList<Expression*>* args = expr->arguments();
   3483   ASSERT(args->length() == 2);
   3484 
   3485   VisitForStackValue(args->at(0));
   3486   VisitForAccumulatorValue(args->at(1));
   3487 
   3488   Register object = ebx;
   3489   Register index = eax;
   3490   Register result = edx;
   3491 
   3492   __ pop(object);
   3493 
   3494   Label need_conversion;
   3495   Label index_out_of_range;
   3496   Label done;
   3497   StringCharCodeAtGenerator generator(object,
   3498                                       index,
   3499                                       result,
   3500                                       &need_conversion,
   3501                                       &need_conversion,
   3502                                       &index_out_of_range,
   3503                                       STRING_INDEX_IS_NUMBER);
   3504   generator.GenerateFast(masm_);
   3505   __ jmp(&done);
   3506 
   3507   __ bind(&index_out_of_range);
   3508   // When the index is out of range, the spec requires us to return
   3509   // NaN.
   3510   __ Move(result, Immediate(isolate()->factory()->nan_value()));
   3511   __ jmp(&done);
   3512 
   3513   __ bind(&need_conversion);
   3514   // Move the undefined value into the result register, which will
   3515   // trigger conversion.
   3516   __ Move(result, Immediate(isolate()->factory()->undefined_value()));
   3517   __ jmp(&done);
   3518 
   3519   NopRuntimeCallHelper call_helper;
   3520   generator.GenerateSlow(masm_, call_helper);
   3521 
   3522   __ bind(&done);
   3523   context()->Plug(result);
   3524 }
   3525 
   3526 
   3527 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
          // Inline code for %_StringCharAt(string, index): returns the
          // one-character string at the given index, the empty string when
          // the index is out of range, and smi zero (to trigger conversion)
          // when the index is not a valid number.
   3528   ZoneList<Expression*>* args = expr->arguments();
   3529   ASSERT(args->length() == 2);
   3530 
   3531   VisitForStackValue(args->at(0));
   3532   VisitForAccumulatorValue(args->at(1));
   3533 
   3534   Register object = ebx;
   3535   Register index = eax;
   3536   Register scratch = edx;
   3537   Register result = eax;
   3538 
   3539   __ pop(object);
   3540 
   3541   Label need_conversion;
   3542   Label index_out_of_range;
   3543   Label done;
   3544   StringCharAtGenerator generator(object,
   3545                                   index,
   3546                                   scratch,
   3547                                   result,
   3548                                   &need_conversion,
   3549                                   &need_conversion,
   3550                                   &index_out_of_range,
   3551                                   STRING_INDEX_IS_NUMBER);
   3552   generator.GenerateFast(masm_);
   3553   __ jmp(&done);
   3554 
   3555   __ bind(&index_out_of_range);
   3556   // When the index is out of range, the spec requires us to return
   3557   // the empty string.
   3558   __ Move(result, Immediate(isolate()->factory()->empty_string()));
   3559   __ jmp(&done);
   3560 
   3561   __ bind(&need_conversion);
   3562   // Move smi zero into the result register, which will trigger
   3563   // conversion.
   3564   __ Move(result, Immediate(Smi::FromInt(0)));
   3565   __ jmp(&done);
   3566 
   3567   NopRuntimeCallHelper call_helper;
   3568   generator.GenerateSlow(masm_, call_helper);
   3569 
   3570   __ bind(&done);
   3571   context()->Plug(result);
   3572 }
   3573 
   3574 
   3575 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
          // Inline code for %_StringAdd(left, right): concatenates two
          // strings via StringAddStub (left in edx, right in eax, with type
          // checks on both operands); result in eax.
   3576   ZoneList<Expression*>* args = expr->arguments();
   3577   ASSERT_EQ(2, args->length());
   3578   VisitForStackValue(args->at(0));
   3579   VisitForAccumulatorValue(args->at(1));
   3580 
   3581   __ pop(edx);
   3582   StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
   3583   __ CallStub(&stub);
   3584   context()->Plug(eax);
   3585 }
   3586 
   3587 
   3588 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
          // Inline code for %_StringCompare(left, right): delegates to
          // StringCompareStub with both arguments on the stack; the stub's
          // result is left in eax.
   3589   ZoneList<Expression*>* args = expr->arguments();
   3590   ASSERT_EQ(2, args->length());
   3591 
   3592   VisitForStackValue(args->at(0));
   3593   VisitForStackValue(args->at(1));
   3594 
   3595   StringCompareStub stub(isolate());
   3596   __ CallStub(&stub);
   3597   context()->Plug(eax);
   3598 }
   3599 
   3600 
   3601 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
          // Inline code for %_CallFunction(receiver, args..., function):
          // invokes the function directly when it is a JSFunction, otherwise
          // falls back to Runtime::kCall (which also handles proxies).
   3602   ZoneList<Expression*>* args = expr->arguments();
   3603   ASSERT(args->length() >= 2);
   3604 
   3605   int arg_count = args->length() - 2;  // 2 ~ receiver and function.
          // Push the receiver and the call arguments; the function itself is
          // evaluated last, into the accumulator.
   3606   for (int i = 0; i < arg_count + 1; ++i) {
   3607     VisitForStackValue(args->at(i));
   3608   }
   3609   VisitForAccumulatorValue(args->last());  // Function.
   3610 
   3611   Label runtime, done;
   3612   // Check for non-function argument (including proxy).
   3613   __ JumpIfSmi(eax, &runtime);
   3614   __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
   3615   __ j(not_equal, &runtime);
   3616 
   3617   // InvokeFunction requires the function in edi. Move it in there.
   3618   __ mov(edi, result_register());
   3619   ParameterCount count(arg_count);
   3620   __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper());
          // Restore the context register, which the callee may have changed.
   3621   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
   3622   __ jmp(&done);
   3623 
   3624   __ bind(&runtime);
          // Slow path: push the function too and let the runtime do the call.
   3625   __ push(eax);
   3626   __ CallRuntime(Runtime::kCall, args->length());
   3627   __ bind(&done);
   3628 
   3629   context()->Plug(eax);
   3630 }
   3631 
   3632 
   3633 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
          // Inline code for %_RegExpConstructResult(size, index, input):
          // builds a RegExp result array via RegExpConstructResultStub.
          // The stub takes its arguments in ecx, ebx and eax respectively.
   3634   // Load the arguments on the stack and call the stub.
   3635   RegExpConstructResultStub stub(isolate());
   3636   ZoneList<Expression*>* args = expr->arguments();
   3637   ASSERT(args->length() == 3);
   3638   VisitForStackValue(args->at(0));
   3639   VisitForStackValue(args->at(1));
   3640   VisitForAccumulatorValue(args->at(2));
   3641   __ pop(ebx);
   3642   __ pop(ecx);
   3643   __ CallStub(&stub);
   3644   context()->Plug(eax);
   3645 }
   3646 
   3647 
   3648 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
          // Inline code for %_GetFromCache(cache_id, key): looks up a value
          // in one of the JSFunction result caches.  The fast path only
          // probes the cache's "finger" entry; a miss calls the runtime,
          // which performs the full lookup and updates the cache.
          // cache_id must be a literal smi.
   3649   ZoneList<Expression*>* args = expr->arguments();
   3650   ASSERT_EQ(2, args->length());
   3651 
   3652   ASSERT_NE(NULL, args->at(0)->AsLiteral());
   3653   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
   3654 
   3655   Handle<FixedArray> jsfunction_result_caches(
   3656       isolate()->native_context()->jsfunction_result_caches());
   3657   if (jsfunction_result_caches->length() <= cache_id) {
            // Unknown cache id: emit an abort (debug) and a benign fallback.
   3658     __ Abort(kAttemptToUseUndefinedCache);
   3659     __ mov(eax, isolate()->factory()->undefined_value());
   3660     context()->Plug(eax);
   3661     return;
   3662   }
   3663 
   3664   VisitForAccumulatorValue(args->at(1));
   3665 
   3666   Register key = eax;
   3667   Register cache = ebx;
   3668   Register tmp = ecx;
          // Navigate global object -> native context -> caches array -> the
          // cache fixed array selected by cache_id.
   3669   __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
   3670   __ mov(cache,
   3671          FieldOperand(cache, GlobalObject::kNativeContextOffset));
   3672   __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
   3673   __ mov(cache,
   3674          FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
   3675 
   3676   Label done, not_found;
   3677   STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
   3678   __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
   3679   // tmp now holds finger offset as a smi.
   3680   __ cmp(key, FixedArrayElementOperand(cache, tmp));
   3681   __ j(not_equal, &not_found);
   3682 
          // Hit: the cached value sits in the slot after the key (offset +1).
   3683   __ mov(eax, FixedArrayElementOperand(cache, tmp, 1));
   3684   __ jmp(&done);
   3685 
   3686   __ bind(&not_found);
   3687   // Call runtime to perform the lookup.
   3688   __ push(cache);
   3689   __ push(key);
   3690   __ CallRuntime(Runtime::kHiddenGetFromCache, 2);
   3691 
   3692   __ bind(&done);
   3693   context()->Plug(eax);
   3694 }
   3695 
   3696 
   3697 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
          // Inline code for %_HasCachedArrayIndex: true iff the string
          // argument has an array index cached in its hash field (tested via
          // the kContainsCachedArrayIndexMask bits).  The argument is
          // asserted to be a string.
   3698   ZoneList<Expression*>* args = expr->arguments();
   3699   ASSERT(args->length() == 1);
   3700 
   3701   VisitForAccumulatorValue(args->at(0));
   3702 
   3703   __ AssertString(eax);
   3704 
   3705   Label materialize_true, materialize_false;
   3706   Label* if_true = NULL;
   3707   Label* if_false = NULL;
   3708   Label* fall_through = NULL;
   3709   context()->PrepareTest(&materialize_true, &materialize_false,
   3710                          &if_true, &if_false, &fall_through);
   3711 
          // The mask bits are clear exactly when a cached index is present,
          // hence the Split on `zero` below.
   3712   __ test(FieldOperand(eax, String::kHashFieldOffset),
   3713           Immediate(String::kContainsCachedArrayIndexMask));
   3714   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   3715   Split(zero, if_true, if_false, fall_through);
   3716 
   3717   context()->Plug(if_true, if_false);
   3718 }
   3719 
   3720 
   3721 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
          // Inline code for %_GetCachedArrayIndex: extracts the array index
          // cached in the string's hash field and returns it as a smi.
          // Callers are expected to have checked %_HasCachedArrayIndex first.
   3722   ZoneList<Expression*>* args = expr->arguments();
   3723   ASSERT(args->length() == 1);
   3724   VisitForAccumulatorValue(args->at(0));
   3725 
   3726   __ AssertString(eax);
   3727 
   3728   __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
          // IndexFromHash decodes the index bits of the hash field into a smi.
   3729   __ IndexFromHash(eax, eax);
   3730 
   3731   context()->Plug(eax);
   3732 }
   3733 
   3734 
   3735 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
   3736   Label bailout, done, one_char_separator, long_separator,
   3737       non_trivial_array, not_size_one_array, loop,
   3738       loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
   3739 
   3740   ZoneList<Expression*>* args = expr->arguments();
   3741   ASSERT(args->length() == 2);
   3742   // We will leave the separator on the stack until the end of the function.
   3743   VisitForStackValue(args->at(1));
   3744   // Load this to eax (= array)
   3745   VisitForAccumulatorValue(args->at(0));
   3746   // All aliases of the same register have disjoint lifetimes.
   3747   Register array = eax;
   3748   Register elements = no_reg;  // Will be eax.
   3749 
   3750   Register index = edx;
   3751 
   3752   Register string_length = ecx;
   3753 
   3754   Register string = esi;
   3755 
   3756   Register scratch = ebx;
   3757 
   3758   Register array_length = edi;
   3759   Register result_pos = no_reg;  // Will be edi.
   3760 
   3761   // Separator operand is already pushed.
   3762   Operand separator_operand = Operand(esp, 2 * kPointerSize);
   3763   Operand result_operand = Operand(esp, 1 * kPointerSize);
   3764   Operand array_length_operand = Operand(esp, 0);
   3765   __ sub(esp, Immediate(2 * kPointerSize));
   3766   __ cld();
   3767   // Check that the array is a JSArray
   3768   __ JumpIfSmi(array, &bailout);
   3769   __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
   3770   __ j(not_equal, &bailout);
   3771 
   3772   // Check that the array has fast elements.
   3773   __ CheckFastElements(scratch, &bailout);
   3774 
   3775   // If the array has length zero, return the empty string.
   3776   __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
   3777   __ SmiUntag(array_length);
   3778   __ j(not_zero, &non_trivial_array);
   3779   __ mov(result_operand, isolate()->factory()->empty_string());
   3780   __ jmp(&done);
   3781 
   3782   // Save the array length.
   3783   __ bind(&non_trivial_array);
   3784   __ mov(array_length_operand, array_length);
   3785 
   3786   // Save the FixedArray containing array's elements.
   3787   // End of array's live range.
   3788   elements = array;
   3789   __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
   3790   array = no_reg;
   3791 
   3792 
   3793   // Check that all array elements are sequential ASCII strings, and
   3794   // accumulate the sum of their lengths, as a smi-encoded value.
   3795   __ Move(index, Immediate(0));
   3796   __ Move(string_length, Immediate(0));
   3797   // Loop condition: while (index < length).
   3798   // Live loop registers: index, array_length, string,
   3799   //                      scratch, string_length, elements.
   3800   if (generate_debug_code_) {
   3801     __ cmp(index, array_length);
   3802     __ Assert(less, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
   3803   }
   3804   __ bind(&loop);
   3805   __ mov(string, FieldOperand(elements,
   3806                               index,
   3807                               times_pointer_size,
   3808                               FixedArray::kHeaderSize));
   3809   __ JumpIfSmi(string, &bailout);
   3810   __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
   3811   __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
   3812   __ and_(scratch, Immediate(
   3813       kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
   3814   __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
   3815   __ j(not_equal, &bailout);
   3816   __ add(string_length,
   3817          FieldOperand(string, SeqOneByteString::kLengthOffset));
   3818   __ j(overflow, &bailout);
   3819   __ add(index, Immediate(1));
   3820   __ cmp(index, array_length);
   3821   __ j(less, &loop);
   3822 
   3823   // If array_length is 1, return elements[0], a string.
   3824   __ cmp(array_length, 1);
   3825   __ j(not_equal, &not_size_one_array);
   3826   __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
   3827   __ mov(result_operand, scratch);
   3828   __ jmp(&done);
   3829 
   3830   __ bind(&not_size_one_array);
   3831 
   3832   // End of array_length live range.
   3833   result_pos = array_length;
   3834   array_length = no_reg;
   3835 
   3836   // Live registers:
   3837   // string_length: Sum of string lengths, as a smi.
   3838   // elements: FixedArray of strings.
   3839 
   3840   // Check that the separator is a flat ASCII string.
   3841   __ mov(string, separator_operand);
   3842   __ JumpIfSmi(string, &bailout);
   3843   __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
   3844   __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
   3845   __ and_(scratch, Immediate(
   3846       kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
   3847   __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
   3848   __ j(not_equal, &bailout);
   3849 
   3850   // Add (separator length times array_length) - separator length
   3851   // to string_length.
   3852   __ mov(scratch, separator_operand);
   3853   __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
   3854   __ sub(string_length, scratch);  // May be negative, temporarily.
   3855   __ imul(scratch, array_length_operand);
   3856   __ j(overflow, &bailout);
   3857   __ add(string_length, scratch);
   3858   __ j(overflow, &bailout);
   3859 
   3860   __ shr(string_length, 1);
   3861   // Live registers and stack values:
   3862   //   string_length
   3863   //   elements
   3864   __ AllocateAsciiString(result_pos, string_length, scratch,
   3865                          index, string, &bailout);
   3866   __ mov(result_operand, result_pos);
   3867   __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
   3868 
   3869 
   3870   __ mov(string, separator_operand);
   3871   __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
   3872          Immediate(Smi::FromInt(1)));
   3873   __ j(equal, &one_char_separator);
   3874   __ j(greater, &long_separator);
   3875 
   3876 
   3877   // Empty separator case
   3878   __ mov(index, Immediate(0));
   3879   __ jmp(&loop_1_condition);
   3880   // Loop condition: while (index < length).
   3881   __ bind(&loop_1);
   3882   // Each iteration of the loop concatenates one string to the result.
   3883   // Live values in registers:
   3884   //   index: which element of the elements array we are adding to the result.
   3885   //   result_pos: the position to which we are currently copying characters.
   3886   //   elements: the FixedArray of strings we are joining.
   3887 
   3888   // Get string = array[index].
   3889   __ mov(string, FieldOperand(elements, index,
   3890                               times_pointer_size,
   3891                               FixedArray::kHeaderSize));
   3892   __ mov(string_length,
   3893          FieldOperand(string, String::kLengthOffset));
   3894   __ shr(string_length, 1);
   3895   __ lea(string,
   3896          FieldOperand(string, SeqOneByteString::kHeaderSize));
   3897   __ CopyBytes(string, result_pos, string_length, scratch);
   3898   __ add(index, Immediate(1));
   3899   __ bind(&loop_1_condition);
   3900   __ cmp(index, array_length_operand);
   3901   __ j(less, &loop_1);  // End while (index < length).
   3902   __ jmp(&done);
   3903 
   3904 
   3905 
   3906   // One-character separator case
   3907   __ bind(&one_char_separator);
   3908   // Replace separator with its ASCII character value.
   3909   __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
   3910   __ mov_b(separator_operand, scratch);
   3911 
   3912   __ Move(index, Immediate(0));
   3913   // Jump into the loop after the code that copies the separator, so the first
   3914   // element is not preceded by a separator
   3915   __ jmp(&loop_2_entry);
   3916   // Loop condition: while (index < length).
   3917   __ bind(&loop_2);
   3918   // Each iteration of the loop concatenates one string to the result.
   3919   // Live values in registers:
   3920   //   index: which element of the elements array we are adding to the result.
   3921   //   result_pos: the position to which we are currently copying characters.
   3922 
   3923   // Copy the separator character to the result.
   3924   __ mov_b(scratch, separator_operand);
   3925   __ mov_b(Operand(result_pos, 0), scratch);
   3926   __ inc(result_pos);
   3927 
   3928   __ bind(&loop_2_entry);
   3929   // Get string = array[index].
   3930   __ mov(string, FieldOperand(elements, index,
   3931                               times_pointer_size,
   3932                               FixedArray::kHeaderSize));
   3933   __ mov(string_length,
   3934          FieldOperand(string, String::kLengthOffset));
   3935   __ shr(string_length, 1);
   3936   __ lea(string,
   3937          FieldOperand(string, SeqOneByteString::kHeaderSize));
   3938   __ CopyBytes(string, result_pos, string_length, scratch);
   3939   __ add(index, Immediate(1));
   3940 
   3941   __ cmp(index, array_length_operand);
   3942   __ j(less, &loop_2);  // End while (index < length).
   3943   __ jmp(&done);
   3944 
   3945 
   3946   // Long separator case (separator is more than one character).
   3947   __ bind(&long_separator);
   3948 
   3949   __ Move(index, Immediate(0));
   3950   // Jump into the loop after the code that copies the separator, so the first
   3951   // element is not preceded by a separator
   3952   __ jmp(&loop_3_entry);
   3953   // Loop condition: while (index < length).
   3954   __ bind(&loop_3);
   3955   // Each iteration of the loop concatenates one string to the result.
   3956   // Live values in registers:
   3957   //   index: which element of the elements array we are adding to the result.
   3958   //   result_pos: the position to which we are currently copying characters.
   3959 
   3960   // Copy the separator to the result.
   3961   __ mov(string, separator_operand);
   3962   __ mov(string_length,
   3963          FieldOperand(string, String::kLengthOffset));
   3964   __ shr(string_length, 1);
   3965   __ lea(string,
   3966          FieldOperand(string, SeqOneByteString::kHeaderSize));
   3967   __ CopyBytes(string, result_pos, string_length, scratch);
   3968 
   3969   __ bind(&loop_3_entry);
   3970   // Get string = array[index].
   3971   __ mov(string, FieldOperand(elements, index,
   3972                               times_pointer_size,
   3973                               FixedArray::kHeaderSize));
   3974   __ mov(string_length,
   3975          FieldOperand(string, String::kLengthOffset));
   3976   __ shr(string_length, 1);
   3977   __ lea(string,
   3978          FieldOperand(string, SeqOneByteString::kHeaderSize));
   3979   __ CopyBytes(string, result_pos, string_length, scratch);
   3980   __ add(index, Immediate(1));
   3981 
   3982   __ cmp(index, array_length_operand);
   3983   __ j(less, &loop_3);  // End while (index < length).
   3984   __ jmp(&done);
   3985 
   3986 
   3987   __ bind(&bailout);
   3988   __ mov(result_operand, isolate()->factory()->undefined_value());
   3989   __ bind(&done);
   3990   __ mov(eax, result_operand);
   3991   // Drop temp values from the stack, and restore context register.
   3992   __ add(esp, Immediate(3 * kPointerSize));
   3993 
   3994   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
   3995   context()->Plug(eax);
   3996 }
   3997 
   3998 
   3999 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
   4000   if (expr->function() != NULL &&
   4001       expr->function()->intrinsic_type == Runtime::INLINE) {
   4002     Comment cmnt(masm_, "[ InlineRuntimeCall");
   4003     EmitInlineRuntimeCall(expr);
   4004     return;
   4005   }
   4006 
   4007   Comment cmnt(masm_, "[ CallRuntime");
   4008   ZoneList<Expression*>* args = expr->arguments();
   4009 
   4010   if (expr->is_jsruntime()) {
   4011     // Push the builtins object as receiver.
   4012     __ mov(eax, GlobalObjectOperand());
   4013     __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
   4014 
   4015     // Load the function from the receiver.
   4016     __ mov(edx, Operand(esp, 0));
   4017     __ mov(ecx, Immediate(expr->name()));
   4018     CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
   4019 
   4020     // Push the target function under the receiver.
   4021     __ push(Operand(esp, 0));
   4022     __ mov(Operand(esp, kPointerSize), eax);
   4023 
   4024     // Code common for calls using the IC.
   4025     ZoneList<Expression*>* args = expr->arguments();
   4026     int arg_count = args->length();
   4027     for (int i = 0; i < arg_count; i++) {
   4028       VisitForStackValue(args->at(i));
   4029     }
   4030 
   4031     // Record source position of the IC call.
   4032     SetSourcePosition(expr->position());
   4033     CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
   4034     __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
   4035     __ CallStub(&stub);
   4036     // Restore context register.
   4037     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
   4038     context()->DropAndPlug(1, eax);
   4039 
   4040   } else {
   4041     // Push the arguments ("left-to-right").
   4042     int arg_count = args->length();
   4043     for (int i = 0; i < arg_count; i++) {
   4044       VisitForStackValue(args->at(i));
   4045     }
   4046 
   4047     // Call the C runtime function.
   4048     __ CallRuntime(expr->function(), arg_count);
   4049 
   4050     context()->Plug(eax);
   4051   }
   4052 }
   4053 
   4054 
// Emits code for the unary operators delete, void, ! and typeof.
// Each operator has its own expansion; the result is plugged into the
// current expression context.
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        // delete obj[key] / delete obj.key: evaluate receiver and key,
        // pass the strict-mode flag and defer to the DELETE builtin.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ push(Immediate(Smi::FromInt(strict_mode())));
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          // Global variable: delete from the global object (always sloppy —
          // strict-mode unqualified delete was rejected above).
          __ push(GlobalObjectOperand());
          __ push(Immediate(var->name()));
          __ push(Immediate(Smi::FromInt(SLOPPY)));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      // Evaluate the operand for its side effects, then produce undefined.
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        // Note the swapped labels: the operand jumping to materialize_false
        // means NOT yields true, and vice versa.
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      // Load the operand with the typeof-specific path (no reference
      // errors for undefined globals), then call the runtime.
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}
   4167 
   4168 
// Emits code for ++/-- (prefix and postfix) on a variable, named property
// or keyed property.  Fast smi arithmetic is inlined (and patched later via
// the JumpPatchSite); the generic path goes through ToNumber and the
// BinaryOpIC.  For postfix expressions in a value context the old value is
// kept on the stack beneath any receiver/key so it can be plugged after the
// store.
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  ASSERT(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ push(Immediate(Smi::FromInt(0)));
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in edx.
      VisitForAccumulatorValue(prop->obj());
      __ push(eax);
      __ mov(edx, eax);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForStackValue(prop->key());
      __ mov(edx, Operand(esp, kPointerSize));  // Object.
      __ mov(ecx, Operand(esp, 0));             // Key.
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load my have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(eax);
            break;
          case NAMED_PROPERTY:
            __ mov(Operand(esp, kPointerSize), eax);
            break;
          case KEYED_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
        }
      }
    }

    // Smi fast path: add/subtract the smi-tagged constant 1 directly.
    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(no_overflow, &done, Label::kNear);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }
  // Generic path: coerce the value to a number first.
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
      }
    }
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  BinaryOpICStub stub(isolate(), expr->binary_op(), NO_OVERWRITE);
  CallIC(stub.GetCode(), expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        // For all contexts except EffectContext We have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      // Store via the StoreIC: ecx = name, edx = receiver (popped).
      __ mov(ecx, prop->key()->AsLiteral()->value());
      __ pop(edx);
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      // Store via the KeyedStoreIC: ecx = key, edx = receiver (popped).
      __ pop(ecx);
      __ pop(edx);
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}
   4355 
   4356 
// Loads the value of |expr| for use as the operand of typeof.  Unlike a
// normal variable load, unallocated (global) and lookup-slot variables
// must not throw a reference error when undefined, so dedicated load
// paths are used for those cases.
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "[ Global variable");
    __ mov(edx, GlobalObjectOperand());
    __ mov(ecx, Immediate(proxy->name()));
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(eax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    // Slow path: resolve the slot in the runtime without raising a
    // reference error for missing bindings.
    __ bind(&slow);
    __ push(esi);  // Current context.
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(eax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}
   4392 
   4393 
   4394 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
   4395                                                  Expression* sub_expr,
   4396                                                  Handle<String> check) {
   4397   Label materialize_true, materialize_false;
   4398   Label* if_true = NULL;
   4399   Label* if_false = NULL;
   4400   Label* fall_through = NULL;
   4401   context()->PrepareTest(&materialize_true, &materialize_false,
   4402                          &if_true, &if_false, &fall_through);
   4403 
   4404   { AccumulatorValueContext context(this);
   4405     VisitForTypeofValue(sub_expr);
   4406   }
   4407   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   4408 
   4409   Factory* factory = isolate()->factory();
   4410   if (String::Equals(check, factory->number_string())) {
   4411     __ JumpIfSmi(eax, if_true);
   4412     __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
   4413            isolate()->factory()->heap_number_map());
   4414     Split(equal, if_true, if_false, fall_through);
   4415   } else if (String::Equals(check, factory->string_string())) {
   4416     __ JumpIfSmi(eax, if_false);
   4417     __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
   4418     __ j(above_equal, if_false);
   4419     // Check for undetectable objects => false.
   4420     __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
   4421               1 << Map::kIsUndetectable);
   4422     Split(zero, if_true, if_false, fall_through);
   4423   } else if (String::Equals(check, factory->symbol_string())) {
   4424     __ JumpIfSmi(eax, if_false);
   4425     __ CmpObjectType(eax, SYMBOL_TYPE, edx);
   4426     Split(equal, if_true, if_false, fall_through);
   4427   } else if (String::Equals(check, factory->boolean_string())) {
   4428     __ cmp(eax, isolate()->factory()->true_value());
   4429     __ j(equal, if_true);
   4430     __ cmp(eax, isolate()->factory()->false_value());
   4431     Split(equal, if_true, if_false, fall_through);
   4432   } else if (FLAG_harmony_typeof &&
   4433              String::Equals(check, factory->null_string())) {
   4434     __ cmp(eax, isolate()->factory()->null_value());
   4435     Split(equal, if_true, if_false, fall_through);
   4436   } else if (String::Equals(check, factory->undefined_string())) {
   4437     __ cmp(eax, isolate()->factory()->undefined_value());
   4438     __ j(equal, if_true);
   4439     __ JumpIfSmi(eax, if_false);
   4440     // Check for undetectable objects => true.
   4441     __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
   4442     __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
   4443     __ test(ecx, Immediate(1 << Map::kIsUndetectable));
   4444     Split(not_zero, if_true, if_false, fall_through);
   4445   } else if (String::Equals(check, factory->function_string())) {
   4446     __ JumpIfSmi(eax, if_false);
   4447     STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
   4448     __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
   4449     __ j(equal, if_true);
   4450     __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
   4451     Split(equal, if_true, if_false, fall_through);
   4452   } else if (String::Equals(check, factory->object_string())) {
   4453     __ JumpIfSmi(eax, if_false);
   4454     if (!FLAG_harmony_typeof) {
   4455       __ cmp(eax, isolate()->factory()->null_value());
   4456       __ j(equal, if_true);
   4457     }
   4458     __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
   4459     __ j(below, if_false);
   4460     __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
   4461     __ j(above, if_false);
   4462     // Check for undetectable objects => false.
   4463     __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
   4464               1 << Map::kIsUndetectable);
   4465     Split(zero, if_true, if_false, fall_through);
   4466   } else {
   4467     if (if_false != fall_through) __ jmp(if_false);
   4468   }
   4469   context()->Plug(if_true, if_false);
   4470 }
   4471 
   4472 
// Emits code for a comparison (==, <, in, instanceof, ...).  Literal
// comparisons get a fully-inlined fast path; otherwise a smi fast case is
// optionally inlined ahead of the CompareIC call, and the result is split
// into the current test context.
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      // Defer to the IN builtin, which returns a boolean heap object.
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      // Generic relational/equality compare: left on stack, right in eax.
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ pop(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        // Fast path: if both operands are smis, compare them directly.
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
   4545 
   4546 
// Emits an inlined comparison of |sub_expr| against null or undefined.
// Strict equality compares object identity directly; non-strict equality
// goes through the CompareNilIC (which also treats the other nil value
// and undetectable objects per == semantics).
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    // Strict equality: a direct pointer comparison suffices.
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    // Non-strict equality: the IC returns a non-zero value for "equal".
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ test(eax, eax);
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
   4574 
   4575 
// Loads the currently executing JSFunction from the standard frame slot
// into eax and plugs it into the expression context.
void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}
   4580 
   4581 
// On ia32, expression results are always produced in eax.
Register FullCodeGenerator::result_register() {
  return eax;
}
   4585 
   4586 
// On ia32, the current JavaScript context is kept in esi.
Register FullCodeGenerator::context_register() {
  return esi;
}
   4590 
   4591 
// Stores |value| into the current frame at |frame_offset| from ebp.
// The offset must be pointer-aligned.
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}
   4596 
   4597 
// Loads slot |context_index| of the current context (esi) into |dst|.
void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}
   4601 
   4602 
   4603 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
   4604   Scope* declaration_scope = scope()->DeclarationScope();
   4605   if (declaration_scope->is_global_scope() ||
   4606       declaration_scope->is_module_scope()) {
   4607     // Contexts nested in the native context have a canonical empty function
   4608     // as their closure, not the anonymous closure containing the global
   4609     // code.  Pass a smi sentinel and let the runtime look up the empty
   4610     // function.
   4611     __ push(Immediate(Smi::FromInt(0)));
   4612   } else if (declaration_scope->is_eval_scope()) {
   4613     // Contexts nested inside eval code have the same closure as the context
   4614     // calling eval, not the anonymous closure containing the eval code.
   4615     // Fetch it from the context.
   4616     __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
   4617   } else {
   4618     ASSERT(declaration_scope->is_function_scope());
   4619     __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
   4620   }
   4621 }
   4622 
   4623 
   4624 // ----------------------------------------------------------------------------
   4625 // Non-local control flow support.
   4626 
// Saves state that must survive the finally block on the stack:
// the (cooked) return address, the result register, and the isolate's
// pending-message state.  ExitFinallyBlock pops these in reverse order,
// so the two functions must be kept in sync.
void FullCodeGenerator::EnterFinallyBlock() {
  // Cook return address on top of stack (smi encoded Code* delta).
  // Storing it as a delta from the code object keeps it valid even if the
  // code object moves (e.g. under GC).
  ASSERT(!result_register().is(edx));
  __ pop(edx);
  __ sub(edx, Immediate(masm_->CodeObject()));
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTag(edx);
  __ push(edx);

  // Store result register while executing finally block.
  __ push(result_register());

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  __ push(edx);

  // The has-pending-message flag is smi-tagged so the stack slot holds a
  // valid tagged value.
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(edx, Operand::StaticVariable(has_pending_message));
  __ SmiTag(edx);
  __ push(edx);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_script));
  __ push(edx);
}
   4657 
   4658 
// Restores the state saved by EnterFinallyBlock, popping in the reverse
// order of the pushes there, and finally jumps to the uncooked return
// address.
void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(edx));
  // Restore pending message from stack.
  __ pop(edx);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(Operand::StaticVariable(pending_message_script), edx);

  // The has-pending-message flag was smi-tagged when saved; untag it.
  __ pop(edx);
  __ SmiUntag(edx);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(Operand::StaticVariable(has_pending_message), edx);

  __ pop(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook return address: convert the saved smi-encoded delta back into
  // an absolute address by re-adding the code object base.
  __ pop(edx);
  __ SmiUntag(edx);
  __ add(edx, Immediate(masm_->CodeObject()));
  __ jmp(edx);
}
   4687 
   4688 
   4689 #undef __
   4690 
   4691 #define __ ACCESS_MASM(masm())
   4692 
// Unwinds the stack past this try-finally on a non-local exit (break,
// continue, return) and calls the finally code.  Returns the enclosing
// nested statement so the caller can continue unwinding.  |stack_depth|
// and |context_length| are reset to zero because this function has fully
// consumed them.
FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset));
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  }
  __ PopTryHandler();
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}
   4715 
   4716 #undef __
   4717 
   4718 
// Instruction encodings toggled by BackEdgeTable::PatchAt at a back-edge
// site: either a short "jns" that skips the interrupt call, or a two-byte
// nop (66 90) so the call is always taken.
static const byte kJnsInstruction = 0x79;  // Opcode of short "jns rel8".
static const byte kJnsOffset = 0x11;  // rel8 displacement skipping the call.
static const byte kNopByteOne = 0x66;  // Operand-size prefix of 2-byte nop.
static const byte kNopByteTwo = 0x90;  // "nop".
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;  // Opcode of "call rel32".
#endif
   4726 
   4727 
// Patches the back-edge site ending at |pc| in |unoptimized_code| to the
// given |target_state|: either re-arms the "jns" that skips the interrupt
// call, or replaces it with a two-byte nop so the call (retargeted to
// |replacement_code|) is always taken.
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // |pc| points just past the call's rel32 operand; derive the addresses
  // of the operand and of the two bytes of the preceding jns.
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     jns ok
      //     call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     nop
      //     nop
      //     call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  // Retarget the call and tell the incremental marker about the new
  // embedded code reference.
  Assembler::set_target_address_at(call_target_address,
                                   unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}
   4763 
   4764 
// Decodes the current patch state of the back-edge site ending at |pc| by
// inspecting the bytes PatchAt writes: a "jns" means INTERRUPT; a nop'd
// jump is disambiguated by which builtin the call targets.
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  // The byte before the operand must always be the call opcode.
  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
    ASSERT_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  // Not INTERRUPT, so the jns must have been nop'd out.
  ASSERT_EQ(kNopByteOne, *jns_instr_address);
  ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));

  if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  // Only remaining possibility: the OSR-after-stack-check builtin.
  ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
            Assembler::target_address_at(call_target_address,
                                         unoptimized_code));
  return OSR_AFTER_STACK_CHECK;
}
   4794 
   4795 
   4796 } }  // namespace v8::internal
   4797 
   4798 #endif  // V8_TARGET_ARCH_IA32
   4799