Home | History | Annotate | Download | only in ia32
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #if V8_TARGET_ARCH_IA32
      6 
      7 #include "src/ast/compile-time-value.h"
      8 #include "src/ast/scopes.h"
      9 #include "src/builtins/builtins-constructor.h"
     10 #include "src/code-factory.h"
     11 #include "src/code-stubs.h"
     12 #include "src/codegen.h"
     13 #include "src/compilation-info.h"
     14 #include "src/compiler.h"
     15 #include "src/debug/debug.h"
     16 #include "src/full-codegen/full-codegen.h"
     17 #include "src/ia32/frames-ia32.h"
     18 #include "src/ic/ic.h"
     19 
     20 namespace v8 {
     21 namespace internal {
     22 
     23 #define __ ACCESS_MASM(masm())
     24 
// Records a patchable inlined-smi-check site. The generated code tests the
// smi tag and branches on the carry flag; the IC machinery later patches the
// jump (jc becomes jz, jnc becomes jnz) to enable or disable the inlined
// smi path. EmitPatchInfo() encodes the distance back to the patch site so
// the patcher can locate the jump instruction.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    // If a patchable jump was emitted, EmitPatchInfo() must have run too.
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // Branch to `target` if `reg` is not a smi. Since `test` clears the carry
  // flag, the not_carry jump is always taken until the site is patched.
  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  // Branch to `target` if `reg` is a smi. The carry jump is never taken
  // until the site is patched.
  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  // Emit the patch marker: a `test eax, imm8` whose 8-bit immediate is the
  // code size generated since the patch site, or a single nop when no
  // patchable jump was emitted.
  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      // The delta must fit in the one-byte immediate of the marker.
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    // Only one patchable jump per site, emitted before the patch info.
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;  // Debug-only: has EmitPatchInfo() been called?
#endif
};
     80 
     81 
     82 // Generate code for a JS function.  On entry to the function the receiver
     83 // and arguments have been pushed on the stack left to right, with the
     84 // return address on top of them.  The actual argument count matches the
     85 // formal parameter count expected by the function.
     86 //
     87 // The live registers are:
     88 //   o edi: the JS function object being called (i.e. ourselves)
     89 //   o edx: the new target value
     90 //   o esi: our context
     91 //   o ebp: our caller's frame pointer
     92 //   o esp: stack pointer (pointing to return address)
     93 //
     94 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
     95 // frames-ia32.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  // The profiling counter counts down to the next interrupt check; it starts
  // at the configured interrupt budget and lives in a heap cell.
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    // Debug-only check that the receiver slot really holds a JSReceiver.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ AssertNotSmi(ecx);
    __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  // Increment invocation count for the function.
  {
    Comment cmnt(masm_, "[ Increment invocation count");
    // Load the feedback vector out of the function, then bump the smi-tagged
    // invocation count slot in place.
    __ mov(ecx, FieldOperand(edi, JSFunction::kFeedbackVectorOffset));
    __ mov(ecx, FieldOperand(ecx, Cell::kValueOffset));
    __ add(
        FieldOperand(ecx, FeedbackVector::kInvocationCountIndex * kPointerSize +
                              FeedbackVector::kHeaderSize),
        Immediate(Smi::FromInt(1)));
  }

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    OperandStackDepthIncrement(locals_count);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        // Many locals: check against the real stack limit before pushing.
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      // Push in batches of kMaxPushes via a loop, then the remainder inline,
      // to bound code size for functions with many locals.
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i  = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->NeedsContext()) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in edi.
    if (info->scope()->is_script_scope()) {
      __ push(edi);
      __ Push(info->scope()->scope_info());
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(edx);  // Preserve new target.
      }
      if (slots <=
          ConstructorBuiltinsAssembler::MaximumFunctionContextSlots()) {
        // Small contexts go through the fast builtin.
        Callable callable = CodeFactory::FastNewFunctionContext(
            isolate(), info->scope()->scope_type());
        __ mov(FastNewFunctionContextDescriptor::SlotsRegister(),
               Immediate(slots));
        __ Call(callable.code(), RelocInfo::CODE_TARGET);
        // Result of the FastNewFunctionContext builtin is always in new space.
        need_write_barrier = false;
      } else {
        __ push(edi);
        __ Push(Smi::FromInt(info->scope()->scope_type()));
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(edx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in eax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    // Index -1 designates the receiver when the scope declares `this`.
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var =
          (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi,
                                    context_offset,
                                    eax,
                                    ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          // No barrier needed: verify the context is really in new space.
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Register holding this function and new target are both trashed in case we
  // bailout here. But since that can happen only when new target is not used
  // and we allocate a context, the value of |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // We don't support new.target and rest parameters here.
  DCHECK_NULL(info->scope()->new_target_var());
  DCHECK_NULL(info->scope()->rest_parameter());
  DCHECK_NULL(info->scope()->this_function_var());

  Variable* arguments = info->scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Reload the function; the builtins below expect it in edi.
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      __ call(isolate()->builtins()->FastNewStrictArguments(),
              RelocInfo::CODE_TARGET);
      RestoreContext();
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(edi);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      __ call(isolate()->builtins()->FastNewSloppyArguments(),
              RelocInfo::CODE_TARGET);
      RestoreContext();
    }

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(info->scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &ok, Label::kNear);
    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}
    325 
    326 
// Reset the accumulator register (eax) to Smi zero.
void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::kZero));
}
    330 
    331 
// Subtract `delta` (smi-tagged) from the profiling counter cell.
// Clobbers ebx and leaves the flags set by the subtraction, which callers
// branch on (e.g. `j(positive, ...)`).
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}
    337 
    338 
    339 void FullCodeGenerator::EmitProfilingCounterReset() {
    340   int reset_value = FLAG_interrupt_budget;
    341   __ mov(ebx, Immediate(profiling_counter_));
    342   __ mov(FieldOperand(ebx, Cell::kValueOffset),
    343          Immediate(Smi::FromInt(reset_value)));
    344 }
    345 
    346 
// Emit the per-back-edge interrupt check for a loop: decrement the profiling
// counter by a weight proportional to the loop body's code size and, when
// the budget is exhausted, call the InterruptCheck builtin and reset the
// counter. Also records the bailout/OSR bookkeeping for the back edge.
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // Larger loop bodies burn the interrupt budget faster.
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}
    374 
// Treat a function return like a back edge: decrement the profiling counter
// and run the interrupt check when the budget is exhausted. The return value
// in eax is preserved across the interrupt call unless this is a tail call
// (where the result register is not live).
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    // Weight by total code size, mirroring back-edge bookkeeping.
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ j(positive, &ok, Label::kNear);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(eax);
  }
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(eax);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}
    399 
// Emit (or jump to) the function's single shared return sequence: optional
// trace call, profiling counter handling, frame teardown, and a return that
// pops the receiver plus all parameters.
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    // The epilogue was already emitted; all other returns jump to it.
    __ jmp(&return_label_);
  } else {
    // Common return label
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    __ leave();

    // Pop receiver + parameters off the caller's stack on return; ecx is
    // the scratch register used by Ret.
    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, ecx);
  }
}
    421 
// Reload the context register (esi) from the frame slot where the prologue
// saved the function's context.
void FullCodeGenerator::RestoreContext() {
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
    425 
    426 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
    427   DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    428   MemOperand operand = codegen()->VarOperand(var, result_register());
    429   // Memory operands can be pushed directly.
    430   codegen()->PushOperand(operand);
    431 }
    432 
    433 
// Root-list-index plugging is not part of the IA32 full codegen; literals
// are materialized via the Handle<Object> overloads instead.
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
    437 
    438 
// Root-list-index plugging is not part of the IA32 full codegen.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
    443 
    444 
// Root-list-index plugging is not part of the IA32 full codegen.
void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
    449 
    450 
// Root-list-index plugging is not part of the IA32 full codegen.
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}
    454 
    455 
// A literal has no observable effect in effect context; emit nothing.
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}
    458 
    459 
    460 void FullCodeGenerator::AccumulatorValueContext::Plug(
    461     Handle<Object> lit) const {
    462   if (lit->IsSmi()) {
    463     __ SafeMove(result_register(), Immediate(lit));
    464   } else {
    465     __ Move(result_register(), Immediate(lit));
    466   }
    467 }
    468 
    469 
    470 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
    471   codegen()->OperandStackDepthIncrement(1);
    472   if (lit->IsSmi()) {
    473     __ SafePush(Immediate(lit));
    474   } else {
    475     __ push(Immediate(lit));
    476   }
    477 }
    478 
    479 
// Plug a literal in test context: when the literal's truthiness is known at
// compile time, emit a direct jump to the matching label (or fall through);
// otherwise load it into the accumulator and emit a dynamic ToBoolean test.
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  // Undetectable objects would make the static decisions below wrong;
  // only null/undefined are allowed to be undetectable here.
  DCHECK(lit->IsNullOrUndefined(isolate()) || !lit->IsUndetectable());
  if (lit->IsNullOrUndefined(isolate()) || lit->IsFalse(isolate())) {
    // Statically falsy.
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    // Statically truthy.
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    // Strings are truthy iff non-empty.
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    // Smis are truthy iff non-zero.
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}
    508 
    509 
// Drop `count` operands from the stack and leave `reg` as the new top of
// stack, reusing the last slot instead of a pop/push pair.
void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  // Overwrite the one remaining slot with the plugged value.
  __ mov(Operand(esp, 0), reg);
}
    516 
    517 
// In effect context the materialized value is unused, so both labels must
// denote the same continuation point; just bind it.
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}
    523 
    524 
// Materialize a boolean into the accumulator: control arrives at one of the
// two labels, which load true/false respectively and join at `done`.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}
    536 
    537 
// Materialize a boolean onto the operand stack: each label pushes its
// oddball and the paths join at `done`. The simulated operand-stack depth
// grows by one.
void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}
    550 
    551 
// In test context no materialization happens: the labels must already be the
// test's own branch targets, so there is nothing to emit.
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}
    557 
    558 
    559 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
    560   Handle<Object> value = flag
    561       ? isolate()->factory()->true_value()
    562       : isolate()->factory()->false_value();
    563   __ mov(result_register(), value);
    564 }
    565 
    566 
    567 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
    568   codegen()->OperandStackDepthIncrement(1);
    569   Handle<Object> value = flag
    570       ? isolate()->factory()->true_value()
    571       : isolate()->factory()->false_value();
    572   __ push(Immediate(value));
    573 }
    574 
    575 
    576 void FullCodeGenerator::TestContext::Plug(bool flag) const {
    577   codegen()->PrepareForBailoutBeforeSplit(condition(),
    578                                           true,
    579                                           true_label_,
    580                                           false_label_);
    581   if (flag) {
    582     if (true_label_ != fall_through_) __ jmp(true_label_);
    583   } else {
    584     if (false_label_ != fall_through_) __ jmp(false_label_);
    585   }
    586 }
    587 
    588 
// Emit a dynamic truthiness test of the accumulator via the ToBoolean IC,
// then branch on whether the IC returned the canonical true value.
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(equal, if_true, if_false, fall_through);
}
    598 
    599 
    600 void FullCodeGenerator::Split(Condition cc,
    601                               Label* if_true,
    602                               Label* if_false,
    603                               Label* fall_through) {
    604   if (if_false == fall_through) {
    605     __ j(cc, if_true);
    606   } else if (if_true == fall_through) {
    607     __ j(NegateCondition(cc), if_false);
    608   } else {
    609     __ j(cc, if_true);
    610     __ jmp(if_false);
    611   }
    612 }
    613 
    614 
    615 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
    616   DCHECK(var->IsStackAllocated());
    617   // Offset is negative because higher indexes are at lower addresses.
    618   int offset = -var->index() * kPointerSize;
    619   // Adjust by a (parameter or local) base offset.
    620   if (var->IsParameter()) {
    621     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
    622   } else {
    623     offset += JavaScriptFrameConstants::kLocal0Offset;
    624   }
    625   return Operand(ebp, offset);
    626 }
    627 
    628 
    629 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
    630   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
    631   if (var->IsContextSlot()) {
    632     int context_chain_length = scope()->ContextChainLength(var->scope());
    633     __ LoadContext(scratch, context_chain_length);
    634     return ContextOperand(scratch, var->index());
    635   } else {
    636     return StackOperand(var);
    637   }
    638 }
    639 
    640 
// Load the current value of `var` into `dest`. `dest` doubles as the
// scratch register used to walk the context chain for context slots.
void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}
    646 
    647 
// Store `src` into `var`'s slot. For context slots, a write barrier is
// emitted afterwards (clobbering both scratch registers). All registers
// must be distinct, and none may alias esi for context slots.
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}
    666 
    667 
// Record a bailout point just before a Split in test context. When
// `should_normalize` is set, the normalization code below is skipped in
// normal execution (via the jump to `skip`) and is only reached when a
// bailout resumes here: it re-tests eax against true and re-splits so the
// resumed test sees a proper boolean.
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}
    686 
    687 
// Debug-only check that the current context (esi) is the context the
// variable is declared in — in particular not a with- or catch-context.
// Clobbers ebx when FLAG_debug_code is set.
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}
    700 
    701 
// Emit code (or record global-declaration data) for a variable declaration,
// dispatching on where the variable is allocated.
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      // Global: defer to DeclareGlobals by appending a quadruple of
      // (name, feedback slot, literal slot, initial value) to globals_ —
      // the same layout VisitFunctionDeclaration uses. The two undefined
      // entries are the unused literal slot and the initial value.
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      FeedbackSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      // Stack slot: only hole-initialize when the binding needs it
      // (let/const temporal dead zone); otherwise nothing to emit.
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case VariableLocation::CONTEXT:
      if (variable->binding_needs_init()) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}
    742 
    743 
// Emit code (or record global-declaration data) for a function declaration,
// dispatching on where the declared variable is allocated.
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      // Global: append (name, feedback slot, literal slot, SharedFunctionInfo)
      // to globals_ for the later DeclareGlobals runtime call.
      globals_->Add(variable->name(), zone());
      FeedbackSlot slot = proxy->VariableFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());

      // We need the slot where the literals array lives, too.
      slot = declaration->fun()->LiteralFeedbackSlot();
      DCHECK(!slot.IsInvalid());
      globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());

      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      // Stack slot: evaluate the closure into the accumulator and store it.
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi,
                                Context::SlotOffset(variable->index()),
                                result_register(),
                                ecx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}
    798 
    799 
// Declares all batched global variables/functions (collected by the
// Visit*Declaration methods above) with a single runtime call.  Pushes the
// name/value pairs array, the declaration flags as a smi, and the feedback
// vector as arguments for Runtime::kDeclareGlobals.
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ EmitLoadFeedbackVector(eax);
  __ Push(eax);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}
    809 
    810 
// Compiles a switch statement as a linear sequence of '===' comparisons
// against the tag value (kept on the operand stack until a case matches),
// followed by the case bodies.  Each comparison has an inline smi fast path
// guarded by a patchable jump (JumpPatchSite) and falls back to the
// CompareIC for the generic case.
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      // Fast path: if both tag (edx) and label (eax) are smis, compare them
      // directly and skip the IC.
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    // The block between the jump and &skip is only reached when deoptimized
    // code resumes at this bailout with the comparison result (a boolean) in
    // eax; normal execution jumps straight over it.
    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    // The CompareIC left its result in eax: nonzero means "not equal".
    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}
    902 
    903 
// Compiles a for-in loop.  The enumerable is converted to a JS receiver,
// then either the map's enum cache or a fixed array of property names
// (obtained from the runtime) is iterated.  While the loop runs, five
// operand-stack slots are live (see OperandStackDepthIncrement(5)):
//   esp[0] : current index (smi)
//   esp[1] : length of the name array (smi)
//   esp[2] : name array (enum cache or fixed array)
//   esp[3] : expected map, or Smi(1) for the permanent slow path
//   esp[4] : the enumerable object itself
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);
  __ bind(&convert);
  __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
  RestoreContext();
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(eax);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime, use_cache, fixed_array;
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
  // The runtime returns either a map (enum cache usable) or a fixed array
  // of names; a map is recognized by its meta map.
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::kZero));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::kZero));  // Initial index.
  __ jmp(&loop);

  // Nothing to enumerate: drop the pushed enumerable and leave.
  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);

  __ push(Immediate(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ push(Immediate(Smi::kZero));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register eax.
  // (index is a smi, so times_2 scaling yields a byte offset.)
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(eax, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ebx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ebx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // We need to filter the key, record slow-path here.
  __ int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadFeedbackVector(edx);
  __ mov(FieldOperand(edx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(FeedbackVector::MegamorphicSentinel(isolate())));

  // eax contains the key.  The receiver in ebx is the second argument to the
  // ForInFilter.  ForInFilter returns undefined if the receiver doesn't
  // have the key or returns the name-converted key.
  __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ JumpIfRoot(result_register(), Heap::kUndefinedValueRootIndex,
                loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register eax.
  __ bind(&update_each);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
   1059 
// Sets the [[HomeObject]] of the method/accessor closure currently on top of
// the operand stack: receiver = esp[0] (the closure), value = esp[offset]
// (the home object), stored under the home_object_symbol via the store IC.
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}
   1067 
// Like EmitSetHomeObject, but the closure receiving the [[HomeObject]] is in
// the accumulator (eax) instead of on top of the operand stack.
void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), eax);
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
}
   1076 
// Loads the variable referenced by |proxy| and plugs the result into the
// current expression context.  Global loads go through the load IC
// (EmitGlobalVariableLoad); stack and context slots are read directly, with
// a hole check (ReferenceError for uninitialized let/const bindings) when
// required.
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Two cases: global variables and all other types of variables.
  switch (var->location()) {
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(eax);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      // Local/context slots are never loaded in INSIDE_TYPEOF mode.
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");

      if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
        // Throw a reference error when using an uninitialized let/const
        // binding in harmony mode.
        Label done;
        GetVar(eax, var);
        __ cmp(eax, isolate()->factory()->the_hole_value());
        __ j(not_equal, &done, Label::kNear);
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError);
        __ bind(&done);
        context()->Plug(eax);
        break;
      }
      // No hole check needed: plug the slot itself so the context can load
      // it lazily.
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP:
    case VariableLocation::MODULE:
      UNREACHABLE();
  }
}
   1121 
   1122 
   1123 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
   1124   Expression* expression = (property == NULL) ? NULL : property->value();
   1125   if (expression == NULL) {
   1126     PushOperand(isolate()->factory()->null_value());
   1127   } else {
   1128     VisitForStackValue(expression);
   1129     if (NeedsHomeObject(expression)) {
   1130       DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
   1131              property->kind() == ObjectLiteral::Property::SETTER);
   1132       int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
   1133       EmitSetHomeObject(expression, offset, property->GetSlot());
   1134     }
   1135   }
   1136 }
   1137 
   1138 
// Compiles an object literal.  The boilerplate object is created either via
// the FastCloneShallowObject stub or, for literals the stub cannot handle,
// via Runtime::kCreateObjectLiteral.  Properties not already baked into the
// boilerplate are then stored one at a time; getter/setter pairs are first
// collected in accessor_table so that each pair costs only a single runtime
// call.
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<BoilerplateDescription> constant_properties =
      expr->GetOrBuildConstantProperties(isolate());
  int flags = expr->ComputeFlags();
  // If any of the keys would store to the elements array, then we shouldn't
  // allow it.
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(SmiFromSlot(expr->literal_slot())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    // Stub calling convention: closure in eax, literal slot in ebx,
    // boilerplate description in ecx, flags in edx.
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(SmiFromSlot(expr->literal_slot())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    Callable callable = CodeFactory::FastCloneShallowObject(
        isolate(), expr->properties_count());
    __ Call(callable.code(), RelocInfo::CODE_TARGET);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    DCHECK(!property->is_computed_name());
    // Compile-time values are already part of the boilerplate.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::SPREAD:
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->IsStringLiteral()) {
          DCHECK(key->IsPropertyName());
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(eax));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            CallStoreIC(property->GetSlot(0), key->value(), true);
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            // Value still needs evaluation for its side effects.
            VisitForEffect(value);
          }
          break;
        }
        // Non-name key: go through Runtime::kSetProperty.
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          PushOperand(Smi::FromInt(SLOPPY));  // Language mode
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(i),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        // Defer: pair up with a possible setter for the same key below.
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(i);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(i);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    PushOperand(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);

    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);

    PushOperand(Smi::FromInt(NONE));
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
   1267 
   1268 
// Compiles an array literal.  The boilerplate array is created via the
// FastCloneShallowArray stub or, when the stub cannot handle the literal,
// via Runtime::kCreateArrayLiteral.  Non-constant subexpressions are then
// evaluated and stored element by element through the keyed store IC.
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<ConstantElementsPair> constant_elements =
      expr->GetOrBuildConstantElements(isolate());

  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(SmiFromSlot(expr->literal_slot())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    // Stub calling convention: closure in eax, literal slot in ebx,
    // constant elements in ecx.
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(SmiFromSlot(expr->literal_slot())));
    __ mov(ecx, Immediate(constant_elements));
    Callable callable =
        CodeFactory::FastCloneShallowArray(isolate(), TRACK_ALLOCATION_SITE);
    __ Call(callable.code(), RelocInfo::CODE_TARGET);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int array_index = 0; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(eax);  // array literal.
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    // Store eax into the array (on TOS) at array_index via the keyed
    // store IC.
    __ mov(StoreDescriptor::NameRegister(),
           Immediate(Smi::FromInt(array_index)));
    __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
    CallKeyedStoreIC(expr->LiteralFeedbackSlot());
    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
   1326 
   1327 
// Compiles an assignment expression.  The LHS reference is evaluated first;
// for compound assignments (e.g. '+=') the current value of the target is
// then loaded, the RHS evaluated, and the binary op applied (with an inline
// smi fast path when profitable) before the store is performed.
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        // Receiver and key stay on the stack for the store; copy them into
        // the load IC's registers for the upcoming keyed load.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case NAMED_SUPER_PROPERTY:
        case KEYED_SUPER_PROPERTY:
          UNREACHABLE();
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->target()->AsVariableProxy();
      EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
                             proxy->hole_check_mode());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      context()->Plug(eax);
      break;
    }
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }
}
   1438 
   1439 
// Yield is never compiled by full-codegen: resumable functions are not
// supported by this tier, so this visitor must be unreachable.
void FullCodeGenerator::VisitYield(Yield* expr) {
  // Resumable functions are not supported.
  UNREACHABLE();
}
   1444 
// Pushes a memory operand onto the machine stack while keeping the
// compile-time operand stack depth (checked in debug mode) in sync.
void FullCodeGenerator::PushOperand(MemOperand operand) {
  OperandStackDepthIncrement(1);
  __ Push(operand);
}
   1449 
// Debug-only consistency check: asserts that the actual frame height
// (ebp - esp) equals the fixed frame size plus the operand stack depth
// tracked at compile time.  Emits nothing unless --debug-code is on.
void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    __ mov(eax, ebp);
    __ sub(eax, esp);
    __ cmp(eax, Immediate(expected_diff));
    __ Assert(equal, kUnexpectedStackDepth);
  }
}
   1460 
// Allocates and initializes a JSIteratorResult ({value, done}) in eax.
// The value is popped from the operand stack; |done| is baked in as a
// compile-time constant.  Falls back to a runtime allocation when new
// space is full.
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &allocate,
              NO_ALLOCATION_FLAGS);
  __ jmp(&done_allocate, Label::kNear);

  // Slow path: allocate in the runtime (e.g. when new space is exhausted).
  __ bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  // Initialize the object: map, empty properties/elements, value, done.
  __ bind(&done_allocate);
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  // Pop the iterator value directly into the value field.
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  __ mov(FieldOperand(eax, JSIteratorResult::kDoneOffset),
         isolate()->factory()->ToBoolean(done));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  OperandStackDepthDecrement(1);
}
   1486 
   1487 
// Emits the inline (patchable) smi fast path for a binary operation plus the
// generic BinaryOpIC slow path. On entry the left operand is on top of the
// operand stack and the right operand is in eax. The result is left in eax
// and plugged into the current expression context.
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack (popped into edx). Right operand is in eax; save a copy in ecx.
  Label smi_case, done, stub_call;
  PopOperand(edx);
  __ mov(ecx, eax);
  __ or_(eax, edx);  // Combine the tag bits of both operands in eax.
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  // Slow path: call the BinaryOpIC stub (left in edx, right in eax).
  __ bind(&stub_call);
  __ mov(eax, ecx);  // Restore the right operand clobbered above.
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  // Smi case. Left operand in edx, right operand in ecx.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      // Arithmetic shift right cannot leave the smi range, so only the tag
      // bits need clearing afterwards.
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of result necessary
      __ and_(eax, Immediate(~kSmiTagMask));
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi: the top two bits of
      // the 32-bit result must be equal (eax - 0xc0000000 non-negative).
      __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
      __ SmiTag(ecx);  // Re-tag the shift count before the stub fallback.
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      // The unsigned result must fit in a non-negative smi: the top two
      // bits have to be clear.
      __ test(eax, Immediate(0xc0000000));
      __ j(zero, &result_ok);
      __ SmiTag(ecx);  // Re-tag the shift count before the stub fallback.
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      // Tagged addition overflows exactly when the untagged sum would.
      __ add(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      // Untag one factor so the product of smi * untagged is again a smi.
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      __ test(eax, eax);
      __ j(not_zero, &done, Label::kNear);
      // A zero product needs a sign check: if either input was negative the
      // correct result is -0, which is not representable as a smi.
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      // Bitwise ops on tagged smis preserve the tag; no overflow possible.
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}
   1580 
// Emits a generic binary operation through the BinaryOpIC stub, without an
// inlined smi fast path. The left operand is popped from the operand stack
// into edx; the right operand is already in eax. Result in eax.
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  PopOperand(edx);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(eax);
}
   1589 
// Emits an assignment of the value in eax to the reference expression
// |expr| (a variable, named property, or keyed property) using feedback
// |slot|. Super-property stores are not supported in full-codegen. The
// assigned value is preserved and plugged into the current context.
void FullCodeGenerator::EmitAssignment(Expression* expr, FeedbackSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->AsVariableProxy();
      // The store is performed for effect; the value stays in eax.
      EffectContext context(this);
      EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
                             proxy->hole_check_mode());
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), eax);
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      CallStoreIC(slot, prop->key()->AsLiteral()->value());
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), eax);
      PopOperand(StoreDescriptor::ReceiverRegister());  // Receiver.
      PopOperand(StoreDescriptor::ValueRegister());     // Restore value.
      CallKeyedStoreIC(slot);
      break;
    }
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }
  context()->Plug(eax);
}
   1629 
   1630 
// Stores eax into |location|, the stack or context slot of |var|. Context
// slot stores additionally emit a write barrier, since eax may hold a heap
// object; callers obtain |location| via VarOperand(var, ecx), so ecx holds
// the context here. Clobbers edx and ebx on the context-slot path.
void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ mov(location, eax);
  if (var->IsContextSlot()) {
    __ mov(edx, eax);  // Preserve the stored value for the barrier.
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
  }
}
   1640 
// Emits an assignment of eax to |var| for operation |op| (ASSIGN or INIT),
// dispatching on the variable's allocation and mode:
//  - unallocated (global): StoreIC on the global object;
//  - lexical (let/const), non-init: optional TDZ hole check, plus const
//    re-assignment handling;
//  - const 'this' initialization: slot must still hold the hole;
//  - ordinary stack/context slot: plain store (debug-only check against
//    let re-initialization).
void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackSlot slot,
                                               HoleCheckMode hole_check_mode) {
  if (var->IsUnallocated()) {
    // Global var, const, or let: store through the StoreIC on the global
    // object, loaded from the native context's extension slot.
    __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
    __ mov(StoreDescriptor::ReceiverRegister(),
           ContextOperand(StoreDescriptor::ReceiverRegister(),
                          Context::EXTENSION_INDEX));
    CallStoreIC(slot, var->name());

  } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    MemOperand location = VarOperand(var, ecx);
    // Perform an initialization check for lexically declared variables:
    // reading the hole means the binding is still in its temporal dead zone.
    if (hole_check_mode == HoleCheckMode::kRequired) {
      Label assign;
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &assign, Label::kNear);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError);
      __ bind(&assign);
    }
    if (var->mode() != CONST) {
      EmitStoreToStackLocalOrContextSlot(var, location);
    } else if (var->throw_on_const_assignment(language_mode())) {
      // Re-assignment to const: either throws or is silently dropped,
      // depending on language mode.
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(equal, &uninitialized_this);
    // 'this' was already initialized: re-binding is a reference error.
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else {
    DCHECK(var->mode() != CONST || op == Token::INIT);
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    DCHECK(!var->IsLookupSlot());
    // Assignment to var or initializing assignment to let/const in harmony
    // mode.
    MemOperand location = VarOperand(var, ecx);
    if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
      // Check for an uninitialized let binding.
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ Check(equal, kLetBindingReInitialization);
    }
    EmitStoreToStackLocalOrContextSlot(var, location);
  }
}
   1700 
   1701 
// Emits the store for an assignment to a named property via the StoreIC.
// On entry:
//   eax    : value
//   esp[0] : receiver
// The assigned value remains in eax as the expression result.
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  PopOperand(StoreDescriptor::ReceiverRegister());
  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(eax);
}
   1715 
   1716 
// Emits the store for an assignment to a keyed property via the
// KeyedStoreIC. On entry:
//   eax               : value
//   esp[0]            : key
//   esp[kPointerSize] : receiver
// The assigned value remains in eax as the expression result.
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  PopOperand(StoreDescriptor::NameRegister());  // Key.
  PopOperand(StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(eax));
  CallKeyedStoreIC(expr->AssignmentSlot());
  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(eax);
}
   1730 
// Code common for calls using the IC. Loads the call target for either a
// bare variable callee (receiver = undefined) or a named-property callee
// (receiver already on the stack), leaving the stack as
// [target, receiver, ...] before dispatching to EmitCall.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    PushOperand(isolate()->factory()->undefined_value());
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver: duplicate the receiver
    // on top, then overwrite the old slot with the loaded target.
    PushOperand(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}
   1762 
   1763 
// Code common for calls using the IC, keyed variant: the callee is
// receiver[key]. Loads the target with the keyed-load IC and pushes it
// under the receiver before dispatching to EmitCall.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver (on the stack) and the key (eax).
  DCHECK(callee->IsProperty());
  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(LoadDescriptor::NameRegister(), eax);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver.
  PushOperand(Operand(esp, 0));
  __ mov(Operand(esp, kPointerSize), eax);

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}
   1786 
   1787 
// Emits the common tail of a JS call: evaluates the arguments onto the
// operand stack, handles tail-call bookkeeping, and invokes the CallIC
// trampoline. Expects [target, receiver] already on the stack. The result
// is left in eax; the target slot is dropped from the stack.
void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> code =
      CodeFactory::CallICTrampoline(isolate(), mode, expr->tail_call_mode())
          .code();
  // Set up the call: edx = feedback slot index, edi = target (below the
  // receiver and arguments on the stack), eax = argument count.
  __ Move(edx, Immediate(IntFromSlot(expr->CallFeedbackICSlot())));
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Move(eax, Immediate(arg_count));
  CallIC(code);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, eax);
}
   1819 
// Emits a 'new' expression: evaluates the constructor and then the
// arguments onto the stack, then invokes the CallConstructStub with
// eax = argc, edi = constructor, ebx = feedback vector and edx = feedback
// slot (smi). The new object is returned in eax.
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  __ EmitLoadFeedbackVector(ebx);
  __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  CallIC(stub.GetCode());
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(eax);
}
   1858 
   1859 
// %_IsSmi(arg): tests the smi tag bit of the value in eax and branches the
// result into the current test context.
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  // A zero tag bit means smi.
  __ test(eax, Immediate(kSmiTagMask));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   1879 
   1880 
// %_IsJSReceiver(arg): tests whether the value in eax is a JSReceiver
// (instance type >= FIRST_JS_RECEIVER_TYPE). Clobbers ebx (map scratch).
void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   1901 
   1902 
// %_IsArray(arg): tests whether the value in eax has instance type
// JS_ARRAY_TYPE. Clobbers ebx (map scratch).
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   1923 
   1924 
// %_IsTypedArray(arg): tests whether the value in eax has instance type
// JS_TYPED_ARRAY_TYPE. Clobbers ebx (map scratch).
void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   1945 
   1946 
// %_IsJSProxy(arg): tests whether the value in eax has instance type
// JS_PROXY_TYPE. Clobbers ebx (map scratch).
void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_PROXY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
   1967 
   1968 
// %_ClassOf(arg): computes a class-name string for the value in eax:
// null for non-receivers, "Function" for function-kind instance types, the
// constructor's instance class name when the map's constructor is a
// JSFunction, and "Object" otherwise. Result in eax; clobbers ebx.
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(eax, &null, Label::kNear);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, eax);
  __ j(below, &null, Label::kNear);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ CmpInstanceType(eax, FIRST_FUNCTION_TYPE);
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ j(above_equal, &function, Label::kNear);

  // Check if the constructor in the map is a JS function.
  __ GetMapConstructor(eax, eax, ebx);
  __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_function_constructor, Label::kNear);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done, Label::kNear);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());
  __ jmp(&done, Label::kNear);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->Function_string());
  __ jmp(&done, Label::kNear);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}
   2017 
   2018 
// %_StringCharCodeAt(string, index): produces the character code at |index|
// of |string| in edx. Yields NaN for an out-of-range index, or undefined
// (to trigger conversion) when the index needs coercion; the shared
// StringCharCodeAtGenerator supplies the fast and slow paths.
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // String goes to the stack, index to the accumulator.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
   2058 
   2059 
// %_Call(target, receiver, ...args): emits a direct call through the Call
// builtin. All operands are pushed, then edi = target and eax = argc are
// loaded for the builtin's calling convention. The target slot is dropped
// afterwards; the result is left in eax.
void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to edi.
  int const argc = args->length() - 2;
  __ mov(edi, Operand(esp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(eax, Immediate(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, eax);
}
   2079 
// %_GetSuperConstructor(fn): loads the prototype of the function's map
// (its [[Prototype]], i.e. the super constructor) into eax.
void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(eax);
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(eax, FieldOperand(eax, Map::kPrototypeOffset));
  context()->Plug(eax);
}
   2089 
// %_DebugIsActive(): loads the isolate's debug_is_active byte flag and
// returns it as a smi in eax.
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
  __ SmiTag(eax);
  context()->Plug(eax);
}
   2098 
   2099 
// %_CreateIterResultObject(value, done): allocates a JSIteratorResult with
// both fields popped off the operand stack (done first, then value). Falls
// back to the runtime when inline allocation fails. Result in eax; clobbers
// ebx, ecx, edx.
void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  // Fast path: inline new-space allocation and field initialization.
  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &runtime,
              NO_ALLOCATION_FLAGS);
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  // done was pushed last, so it is popped first.
  __ pop(FieldOperand(eax, JSIteratorResult::kDoneOffset));
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  // Slow path: let the runtime build the object (consumes the operands).
  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(eax);
}
   2128 
   2129 
// Pushes the JS runtime function for |expr| (looked up in the native
// context) and an undefined receiver, setting up the stack for
// EmitCallJSRuntimeFunction.
void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadGlobalFunction(expr->context_index(), eax);
  PushOperand(eax);

  // Push undefined as receiver.
  PushOperand(isolate()->factory()->undefined_value());
}
   2138 
   2139 
// Calls a JS runtime function whose target and (undefined) receiver were
// pushed by EmitLoadJSRuntimeFunction, with |arg_count| arguments on the
// stack. Loads edi = target and eax = argc for the Call builtin; the result
// is left in eax.
void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}
   2152 
   2153 
// Emits code for the unary operators handled by full-codegen: delete, void,
// logical not, and typeof. Each operator has bespoke handling driven by the
// current expression context.
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        // delete obj[key] / delete obj.key: defer to the runtime.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->is_this();
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocated()) {
          // Global: delete the property from the global object.
          __ mov(eax, NativeContextOperand());
          __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(eax);
        } else {
          DCHECK(!var->IsLookupSlot());
          DCHECK(var->IsStackAllocated() || var->IsContextSlot());
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      // void expr: evaluate for effect, result is always undefined.
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        // Labels are swapped: the subexpression's true branch materializes
        // false, and vice versa.
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      // Move the operand into ebx for the Typeof builtin; result in eax.
      __ mov(ebx, eax);
      __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}
   2268 
   2269 
// Emits code for prefix/postfix increment and decrement (++x, x++, --x, x--)
// on a variable, named property, or keyed property.  Super-property targets
// are rejected (UNREACHABLE) on this path.  For postfix forms in a value
// context, a slot for the old value is kept on the operand stack, stored
// under any receiver/key already pushed for the property store.
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(Smi::kZero);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ mov(LoadDescriptor::ReceiverRegister(),
               Operand(esp, kPointerSize));                       // Object.
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));  // Key.
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY:
      case KEYED_SUPER_PROPERTY:
      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  // The patch site lets the IC system later rewrite the smi check in place.
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(eax);
            break;
          case NAMED_PROPERTY:
            __ mov(Operand(esp, kPointerSize), eax);
            break;
          case KEYED_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
          case NAMED_SUPER_PROPERTY:
          case KEYED_SUPER_PROPERTY:
            UNREACHABLE();
            break;
        }
      }
    }

    // Smi fast path: add/sub the tagged constant 1 directly; smi overflow
    // shows up as machine overflow because of the tagging scheme.
    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(no_overflow, &done, Label::kNear);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
        case NAMED_SUPER_PROPERTY:
        case KEYED_SUPER_PROPERTY:
          UNREACHABLE();
          break;
      }
    }
  }

  SetExpressionPosition(expr);

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                                 proxy->hole_check_mode());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
                               proxy->hole_check_mode());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(eax);
      }
      break;
    }
    case NAMED_PROPERTY: {
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      CallKeyedStoreIC(expr->CountSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        // Result is on the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY:
    case KEYED_SUPER_PROPERTY:
      UNREACHABLE();
      break;
  }
}
   2472 
   2473 
// Emits an inline fast path for the pattern `typeof sub_expr == "check"`,
// branching directly on the value's map/instance type instead of
// materializing the typeof string.  Unknown check strings fall through to
// if_false.  The value under test is in eax after VisitForTypeofValue.
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    // Numbers are either smis or heap numbers.
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(eax, if_false);
    // String types are everything below FIRST_NONSTRING_TYPE.
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    // Only the two boolean oddballs qualify.
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    // null is excluded explicitly: typeof null is "object".
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_false);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(eax, if_false);
    // Check for callable and not undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ and_(ecx, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ cmp(ecx, 1 << Map::kIsCallable);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(eax, if_false);
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, edx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(zero, if_true, if_false, fall_through);
  } else {
    // Unrecognized typeof string: the comparison can never be true.
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
   2541 
   2542 
// Emits code for a comparison expression (==, ===, <, in, instanceof, ...).
// Literal comparisons (typeof checks, null/undefined checks) are handled by
// the inlined fast paths in TryLiteralCompare; everything else evaluates
// left to the stack and right to eax, then dispatches on the operator.
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      // `key in object`: delegate to the HasProperty builtin, which leaves
      // a boolean oddball in eax.
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(edx);
      __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
      RestoreContext();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      // Generic relational/equality compare: left in edx, right in eax.
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      PopOperand(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        // Both operands are smis iff the OR of their tags is a smi.
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The CompareIC returns a smi whose sign/zero-ness encodes the result;
      // test sets the flags for the split below.
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
   2615 
   2616 
// Emits an inline fast path for comparing sub_expr against null or undefined.
// Strict equality compares identity against the single oddball; loose
// equality (== null / == undefined) additionally matches undetectable
// objects, per the abstract equality rules.
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    // Loose compare: true for null, undefined, and undetectable objects.
    __ JumpIfSmi(eax, if_false);
    __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(eax, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
   2645 
   2646 
// The register holding expression results on ia32 (the accumulator).
Register FullCodeGenerator::result_register() {
  return eax;
}
   2650 
   2651 
// The register holding the current JS context on ia32.
Register FullCodeGenerator::context_register() {
  return esi;
}
   2655 
// Loads |value| from the stack frame slot at ebp + frame_offset.
// The offset must be pointer-size aligned.
void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(value, Operand(ebp, frame_offset));
}
   2660 
// Stores |value| into the stack frame slot at ebp + frame_offset.
// The offset must be pointer-size aligned.
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}
   2665 
   2666 
// Loads the given slot of the current context (esi) into |dst|.
void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}
   2670 
   2671 
// Pushes the closure to be recorded in a newly allocated context.  Which
// closure that is depends on the kind of scope owning the new context.
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  DeclarationScope* closure_scope = scope()->GetClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ mov(eax, NativeContextOperand());
    PushOperand(ContextOperand(eax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    PushOperand(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    // Ordinary function scope: the closure is the function in the current
    // JavaScript frame.
    DCHECK(closure_scope->is_function_scope());
    PushOperand(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}
   2691 
   2692 
   2693 #undef __
   2694 
   2695 
// Byte patterns used by BackEdgeTable::PatchAt/GetBackEdgeState below to
// toggle a back-edge check between "jns ok" (interrupt check active) and a
// two-byte nop (on-stack replacement active).
static const byte kJnsInstruction = 0x79;  // Opcode of a short jns.
static const byte kJnsOffset = 0x11;       // Displacement to the "ok" label.
static const byte kNopByteOne = 0x66;      // First byte of the 2-byte nop.
static const byte kNopByteTwo = 0x90;      // Second byte of the 2-byte nop.
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;  // Near-call opcode, debug checks.
#endif
   2703 
   2704 
// Rewrites the back-edge check at |pc| in place: either restores the
// "jns ok" that skips the interrupt stub (INTERRUPT) or overwrites it with
// a 2-byte nop so the following call is always taken (ON_STACK_REPLACEMENT).
// The call target itself is then redirected to |replacement_code|.
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // |pc| points just past the call; back up over its 4-byte operand and the
  // 2-byte jns in front of it.
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     jns ok
      //     call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     nop
      //     nop
      //     call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  // Tell the incremental marker about the patched code-target reference.
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}
   2739 
   2740 
// Inspects the bytes at a back-edge site (see PatchAt above) and reports
// which state it is currently patched to.  A leading jns means the interrupt
// check is active; the 2-byte nop means on-stack replacement is armed.
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  // The site must end in a near call.
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    // In this state the call must target the interrupt-check builtin.
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  DCHECK_EQ(
      isolate->builtins()->OnStackReplacement()->entry(),
      Assembler::target_address_at(call_target_address, unoptimized_code));
  return ON_STACK_REPLACEMENT;
}
   2765 
   2766 
   2767 }  // namespace internal
   2768 }  // namespace v8
   2769 
   2770 #endif  // V8_TARGET_ARCH_IA32
   2771