// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X87

#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"
#include "src/x87/code-stubs-x87.h"
#include "src/x87/frames-x87.h"

namespace v8 {
namespace internal {


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // eax -- number of arguments
  // edi -- function
  // ebx -- allocation site with elements kind
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

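  // Note: a constant_stack_parameter_count of -1 (used by the N-arguments
  // stubs below) denotes a variable number of stack parameters. Whenever the
  // count is not statically zero, eax additionally carries the actual
  // argument count and is registered with the descriptor.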
  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(eax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // eax -- number of arguments
  // edi -- constructor function
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(eax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}


#define __ ACCESS_MASM(masm)
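// Note: ACCESS_MASM(masm) expands to masm->, so every "__ op(...)" below is
// simply a call on the MacroAssembler passed to the generator function.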


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           eax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ ret(0);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow, so there is no need
  // to store the registers in any particular way, but we do have to store
  // and restore them.
  __ pushad();
  if (save_doubles()) {
    // Save the FPU state in a 108-byte save area (m108byte).
    __ sub(esp, Immediate(108));
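    // In 32-bit protected mode, fnsave stores the complete x87 state
    // (control, status and tag words, instruction and data pointers, and all
    // eight ST registers) as a 108-byte image and then reinitializes the FPU;
    // the matching frstor below reloads it.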
    __ fnsave(Operand(esp, 0));
  }
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    // Restore the FPU state from the 108-byte save area.
    __ frstor(Operand(esp, 0));
    __ add(esp, Immediate(108));
  }
  __ popad();
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. The input value must
  // be either a smi or a heap number object (fp value), and is expected in
  // the register |number|. The value is returned as a floating point number
  // on the FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if the operands are smis or heap number objects (fp). Expects
  // operand_1 in eax and operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);
};


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));
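
  // The truncation below works directly on the IEEE-754 bit pattern: for
  // unbiased exponents >= 52 the low 32 result bits are the low mantissa
  // word shifted left by (exponent - 52), and zero once that shift exceeds
  // 31; for smaller exponents the 53-bit significand (hidden bit included)
  // is shifted right by (52 - exponent). If the sign bit was set, the
  // result is negated at the end.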

  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register (in which case it is volatile);
  // if ecx is the return register, save instead the temp register we use in
  // its stead for the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in the lower 32 bits of the mantissa.
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  // Result must be extracted from the shifted 32-bit mantissa.
  __ sub(ecx, Immediate(delta));
  __ neg(ecx);
  if (stash_exponent_copy) {
    __ mov(result_reg, MemOperand(esp, 0));
  } else {
    __ mov(result_reg, exponent_operand);
  }
  __ and_(result_reg,
          Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
  __ add(result_reg,
         Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
  __ shrd(result_reg, scratch1);
  __ shr_cl(result_reg);
  __ test(ecx, Immediate(32));
  {
    Label skip_mov;
    __ j(equal, &skip_mov, Label::kNear);
    __ mov(scratch1, result_reg);
    __ bind(&skip_mov);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  {
    Label skip_mov;
    __ j(less_equal, &skip_mov, Label::kNear);
    __ mov(result_reg, scratch1);
    __ bind(&skip_mov);
  }

  // Restore registers.
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}


void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
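  // fild_s interprets the 32 bits at [esp] as a signed integer and pushes it
  // onto the FPU stack as an extended-precision value.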
  __ pop(number);

  __ bind(&done);
}


void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are numbers (heap numbers or smis); jump to the
  // non_float label otherwise.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // The argument in edx is not a number -> NaN.

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // The argument in eax is not a number -> NaN.

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  const Register base = edx;
  const Register scratch = ecx;
  Counters* counters = isolate()->counters();
  Label call_runtime;

  // We will call the runtime helper function directly.
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as a heap number in eax.
    __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
    __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    // Currently the stub is only called from the full compiler, so the
    // exponent type is always ON_STACK.
    UNIMPLEMENTED();
  }
}


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // With careful management, we won't have to save slot and vector on
  // the stack. Simply handle the possibly missing case first.
  // TODO(mvstanton): this code can be more efficient.
  __ cmp(FieldOperand(receiver, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(isolate()->factory()->the_hole_value()));
  __ j(equal, &miss);
  __ TryGetFunctionPrototype(receiver, eax, ebx, &miss);
  __ ret(0);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}


void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label slow;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register scratch = eax;
  DCHECK(!scratch.is(receiver) && !scratch.is(key));

  // Check that the key is an array index, that is, a Uint32.
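  // (kSmiTagMask catches non-smis and kSmiSignMask catches negative smis, so
  // one test covers both conditions.)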
  __ test(key, Immediate(kSmiTagMask | kSmiSignMask));
  __ j(not_zero, &slow);

  // Everything is fine, call runtime.
  __ pop(scratch);
  __ push(receiver);  // receiver
  __ push(key);       // key
  __ push(scratch);   // return address

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);

  __ bind(&slow);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = edi;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  Register result = eax;
  DCHECK(!result.is(scratch));
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.

  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX,
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in edx and the parameter count is in eax.
  DCHECK(edx.is(ArgumentsAccessReadDescriptor::index()));
  DCHECK(eax.is(ArgumentsAccessReadDescriptor::parameter_count()));

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(edx, &slow, Label::kNear);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor, Label::kNear);

  // Check index against formal parameters count limit passed in
  // through register eax. Use unsigned comparison to get the negative
  // check for free.
  __ cmp(edx, eax);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
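  // eax holds the parameter count as a smi, i.e. the count shifted left by
  // one, so the times_2 scale factor below multiplies the untagged count by
  // kPointerSize.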
  __ lea(ebx, Operand(ebp, eax, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Arguments adaptor case: Check index against the actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get the negative check for free.
  __ bind(&adaptor);
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(edx, ecx);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebx, ecx, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(ebx);  // Return address.
  __ push(edx);
  __ push(ebx);
  __ TailCallRuntime(Runtime::kArguments);
}


void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  // ecx : number of parameters (tagged)
  // edx : parameters pointer
  // edi : function
  // esp[0] : return address

  DCHECK(edi.is(ArgumentsAccessNewDescriptor::function()));
  DCHECK(ecx.is(ArgumentsAccessNewDescriptor::parameter_count()));
  DCHECK(edx.is(ArgumentsAccessNewDescriptor::parameter_pointer()));

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(eax, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &runtime, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx,
         Operand(ebx, ecx, times_2, StandardFrameConstants::kCallerSPOffset));

  __ bind(&runtime);
  __ pop(eax);   // Pop return address.
  __ push(edi);  // Push function.
  __ push(edx);  // Push parameters pointer.
  __ push(ecx);  // Push parameter count.
  __ push(eax);  // Push return address.
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}


void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  // ecx : number of parameters (tagged)
  // edx : parameters pointer
  // edi : function
  // esp[0] : return address

  DCHECK(edi.is(ArgumentsAccessNewDescriptor::function()));
  DCHECK(ecx.is(ArgumentsAccessNewDescriptor::parameter_count()));
  DCHECK(edx.is(ArgumentsAccessNewDescriptor::parameter_pointer()));

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(eax, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // No adaptor, parameter count = argument count.
  __ mov(ebx, ecx);
  __ push(ecx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ebx, ecx);
  __ push(ecx);
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));

  // ebx = parameter count (tagged)
  // ecx = argument count (smi-tagged)
  // Compute the mapped parameter count = min(ebx, ecx) in ebx.
  __ cmp(ebx, ecx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ mov(ebx, ecx);
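  // (Example: for a function f(a, b) invoked with a single argument, the
  // parameter count is 2 and the argument count is 1, so ebx ends up as
  // min(2, 1) = 1 mapped parameter.)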

  // Save mapped parameter count and function.
  __ bind(&try_allocate);
  __ push(edi);
  __ push(ebx);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map: has 2 extra words containing the context and the
  //    backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ add(ebx, Immediate(Heap::kSloppyArgumentsObjectSize));
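  // ebx now holds the total byte size: (optional) parameter map plus
  // backing-store FixedArray plus JSObject header, so a single Allocate
  // call can carve all three objects out of one contiguous chunk.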

  // Do the allocation of all three objects in one go.
  __ Allocate(ebx, eax, edi, no_reg, &runtime, TAG_OBJECT);

  // eax = address of new object(s) (tagged)
  // ecx = argument count (smi-tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[4] = function
  // esp[8] = parameter count (tagged)
  // Get the arguments map from the current native context into edi.
  Label has_mapped_parameters, instantiate;
  __ mov(edi, NativeContextOperand());
  __ mov(ebx, Operand(esp, 0 * kPointerSize));
  __ test(ebx, ebx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);
  __ mov(
      edi,
      Operand(edi, Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX)));
  __ jmp(&instantiate, Label::kNear);

  __ bind(&has_mapped_parameters);
  __ mov(edi, Operand(edi, Context::SlotOffset(
                               Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX)));
  __ bind(&instantiate);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (smi-tagged)
  // edi = address of arguments map (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[4] = function
  // esp[8] = parameter count (tagged)
  // Copy the JS object part.
  __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         masm->isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         masm->isolate()->factory()->empty_fixed_array());

  // Set up the callee in-object property.
  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
  __ mov(edi, Operand(esp, 1 * kPointerSize));
  __ AssertNotSmi(edi);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                               Heap::kArgumentsCalleeIndex * kPointerSize),
         edi);

  // Use the length (smi tagged) and set that as an in-object property too.
  __ AssertSmi(ecx);
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, edi will point there, otherwise to the
  // backing store.
  __ lea(edi, Operand(eax, Heap::kSloppyArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (tagged)
  // edx = address of receiver argument
  // edi = address of parameter map or backing store (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[4] = function
  // esp[8] = parameter count (tagged)
  // Free two registers.
  __ push(edx);
  __ push(eax);

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &skip_parameter_map);

  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
  __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
  __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameters thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop, parameters_test;
  __ push(ecx);
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
  __ add(ebx, Operand(esp, 5 * kPointerSize));
  __ sub(ebx, eax);
  __ mov(ecx, isolate()->factory()->the_hole_value());
  __ mov(edx, edi);
  __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
  // eax = loop variable (tagged)
  // ebx = mapping index (tagged)
  // ecx = the hole value
  // edx = address of parameter map (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = argument count (tagged)
  // esp[4] = address of new object (tagged)
  // esp[8] = address of receiver argument
  // esp[12] = mapped parameter count (tagged)
  // esp[16] = function
  // esp[20] = parameter count (tagged)
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ sub(eax, Immediate(Smi::FromInt(1)));
  __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
  __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
  __ add(ebx, Immediate(Smi::FromInt(1)));
  __ bind(&parameters_test);
  __ test(eax, eax);
  __ j(not_zero, &parameters_loop, Label::kNear);
  __ pop(ecx);

  __ bind(&skip_parameter_map);

  // ecx = argument count (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = address of new object (tagged)
  // esp[4] = address of receiver argument
  // esp[8] = mapped parameter count (tagged)
  // esp[12] = function
  // esp[16] = parameter count (tagged)
  // Copy arguments header and remaining slots (if there are any).
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  Label arguments_loop, arguments_test;
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ sub(edx, ebx);  // Is there a smarter way to do negative scaling?
  __ sub(edx, ebx);
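  // (ebx is a smi, i.e. the count times 2, so subtracting it twice subtracts
  // count * kPointerSize; this is the negative scaling referred to above.)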
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ sub(edx, Immediate(kPointerSize));
  __ mov(eax, Operand(edx, 0));
  __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
  __ add(ebx, Immediate(Smi::FromInt(1)));

  __ bind(&arguments_test);
  __ cmp(ebx, ecx);
  __ j(less, &arguments_loop, Label::kNear);

  // Restore.
  __ pop(eax);  // Address of arguments object.
  __ Drop(4);

  // Return.
  __ ret(0);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ pop(eax);   // Remove saved mapped parameter count.
  __ pop(edi);   // Pop saved function.
  __ pop(eax);   // Remove saved parameter count.
  __ pop(eax);   // Pop return address.
  __ push(edi);  // Push function.
  __ push(edx);  // Push parameters pointer.
  __ push(ecx);  // Push parameter count.
  __ push(eax);  // Push return address.
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}


void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // ecx : number of parameters (tagged)
  // edx : parameters pointer
  // edi : function
  // esp[0] : return address

  DCHECK(edi.is(ArgumentsAccessNewDescriptor::function()));
  DCHECK(ecx.is(ArgumentsAccessNewDescriptor::parameter_count()));
  DCHECK(edx.is(ArgumentsAccessNewDescriptor::parameter_pointer()));

  // Check if the calling frame is an arguments adaptor frame.
  Label try_allocate, runtime;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(eax, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &try_allocate, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx,
         Operand(ebx, ecx, times_2, StandardFrameConstants::kCallerSPOffset));

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ mov(eax, ecx);
  __ test(eax, eax);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ lea(eax, Operand(eax, times_2, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ add(eax, Immediate(Heap::kStrictArgumentsObjectSize));
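  // eax now holds the total size: the elements FixedArray (absent when there
  // are no arguments) plus the strict arguments object itself, so both are
  // allocated with a single Allocate call.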

  // Do the allocation of both objects in one go.
  __ Allocate(eax, eax, ebx, no_reg, &runtime, TAG_OBJECT);

  // Get the arguments map from the current native context.
  __ mov(edi, NativeContextOperand());
  __ mov(edi, ContextOperand(edi, Context::STRICT_ARGUMENTS_MAP_INDEX));

  __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         masm->isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         masm->isolate()->factory()->empty_fixed_array());

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ AssertSmi(ecx);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // If there are no actual arguments, we're done.
  Label done;
  __ test(ecx, ecx);
  __ j(zero, &done, Label::kNear);

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(edi, Operand(eax, Heap::kStrictArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  // Untag the length for the loop below.
  __ SmiUntag(ecx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ mov(ebx, Operand(edx, -1 * kPointerSize));  // Skip receiver.
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
  __ add(edi, Immediate(kPointerSize));
  __ sub(edx, Immediate(kPointerSize));
  __ dec(ecx);
  __ j(not_zero, &loop);

  // Return.
  __ bind(&done);
  __ ret(0);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ pop(eax);   // Pop return address.
  __ push(edi);  // Push function.
  __ push(edx);  // Push parameters pointer.
  __ push(ecx);  // Push parameter count.
  __ push(eax);  // Push return address.
  __ TailCallRuntime(Runtime::kNewStrictArguments);
}


void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
  // ecx : number of parameters (tagged)
  // edx : parameters pointer
  // ebx : rest parameter index (tagged)
  // esp[0] : return address

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ mov(edi, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(eax, Operand(edi, StandardFrameConstants::kContextOffset));
  __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &runtime);

  // Patch the arguments.length and the parameters pointer.
  __ mov(ecx, Operand(edi, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx,
         Operand(edi, ecx, times_2, StandardFrameConstants::kCallerSPOffset));

  __ bind(&runtime);
  __ pop(eax);   // Save return address.
  __ push(ecx);  // Push number of parameters.
  __ push(edx);  // Push parameters pointer.
  __ push(ebx);  // Push rest parameter index.
  __ push(eax);  // Push return address.
  __ TailCallRuntime(Runtime::kNewRestParam);
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to the runtime if native RegExp is not selected at
  // compile time, or if the regexp entry in generated code has been turned
  // off by a runtime switch or at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
  Factory* factory = isolate()->factory();

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, ebx);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or          number_of_captures * 2 <= offsets vector size - 2
  // Multiplying by 2 comes for free since edx is smi-tagged.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Move(edi, Immediate(0));
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);
  __ mov(edx, eax);  // Make a copy of the original subject string.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  // eax: subject string
  // edx: subject string
  // ebx: subject string instance type
  // ecx: RegExp data (FixedArray)
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte?  If yes, go to (9).
  // (2) Sequential one byte?  If yes, go to (6).
  // (3) Anything but sequential or cons?  If yes, go to (7).
  // (4) Cons string.  If the string is flat, replace subject with first string.
  //     Otherwise bailout.
  // (5a) Is subject sequential two byte?  If yes, go to (9).
  // (5b) Is subject external?  If yes, go to (8).
  // (6) One byte sequential.  Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (7) Not a long external string?  If yes, go to (10).
  // (8) External string.  Make it, offset-wise, look like a sequential string.
  // (8a) Is the external string one byte?  If yes, go to (6).
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string?  If yes, bail out to runtime.
  // (11) Sliced string.  Replace subject with parent. Go to (5a).

  Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
        external_string /* 8 */, check_underlying /* 5a */,
        not_seq_nor_cons /* 7 */, check_code /* E */,
        not_long_external /* 10 */;

  // (1) Sequential two byte?  If yes, go to (9).
  __ and_(ebx, kIsNotStringMask |
               kStringRepresentationMask |
               kStringEncodingMask |
               kShortExternalStringMask);
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte?  If yes, go to (6).
  // Any other sequential string must be one byte.
  __ and_(ebx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (6).

  // (3) Anything but sequential or cons?  If yes, go to (7).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmp(ebx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (7).

  // (4) Cons string.  Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
  __ bind(&check_underlying);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  // (5a) Is subject sequential two byte?  If yes, go to (9).
  __ test_b(ebx, kStringRepresentationMask | kStringEncodingMask);
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).
  // (5b) Is subject external?  If yes, go to (8).
  __ test_b(ebx, kStringRepresentationMask);
  // The underlying external string is never a short external string.
  STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
  STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
  __ j(not_zero, &external_string);  // Go to (8).

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (6) One byte sequential.  Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // Load previous index and check range before edx is overwritten.  We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset));
  __ Move(ecx, Immediate(1));  // Type is one byte.

  // (E) Carry on.  String handling is done.
  __ bind(&check_code);
  // edx: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(edx, &runtime);

  // eax: subject string
  // ebx: previous index (smi)
  // edx: code
  // ecx: encoding of subject string (1 if one_byte, 0 if two_byte)
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  __ EnterApiExitFrame(kRegExpExecuteArguments);
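
  // The native regexp entry point takes nine C-style stack arguments, filled
  // in below: subject string, previous index, input start, input end, the
  // static offsets vector, the capture-register count, the high end of the
  // backtrack stack, a direct-call flag, and the isolate address.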
   1066 
   1067   // Argument 9: Pass current isolate address.
   1068   __ mov(Operand(esp, 8 * kPointerSize),
   1069       Immediate(ExternalReference::isolate_address(isolate())));
   1070 
   1071   // Argument 8: Indicate that this is a direct call from JavaScript.
   1072   __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));
   1073 
   1074   // Argument 7: Start (high end) of backtracking stack memory area.
   1075   __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
   1076   __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
   1077   __ mov(Operand(esp, 6 * kPointerSize), esi);
   1078 
   1079   // Argument 6: Set the number of capture registers to zero to force global
   1080   // regexps to behave as non-global.  This does not affect non-global regexps.
   1081   __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));
   1082 
   1083   // Argument 5: static offsets vector buffer.
   1084   __ mov(Operand(esp, 4 * kPointerSize),
   1085          Immediate(ExternalReference::address_of_static_offsets_vector(
   1086              isolate())));
   1087 
   1088   // Argument 2: Previous index.
   1089   __ SmiUntag(ebx);
   1090   __ mov(Operand(esp, 1 * kPointerSize), ebx);
   1091 
   1092   // Argument 1: Original subject string.
   1093   // The original subject is in the previous stack frame. Therefore we have to
   1094   // use ebp, which points exactly to one pointer size below the previous esp.
   1095   // (Because creating a new stack frame pushes the previous ebp onto the stack
   1096   // and thereby moves up esp by one kPointerSize.)
   1097   __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
   1098   __ mov(Operand(esp, 0 * kPointerSize), esi);
   1099 
   1100   // esi: original subject string
   1101   // eax: underlying subject string
   1102   // ebx: previous index
   1103   // ecx: encoding of subject string (1 if one_byte 0 if two_byte);
   1104   // edx: code
   1105   // Argument 4: End of string data
   1106   // Argument 3: Start of string data
   1107   // Prepare start and end index of the input.
   1108   // Load the length from the original sliced string if that is the case.
   1109   __ mov(esi, FieldOperand(esi, String::kLengthOffset));
   1110   __ add(esi, edi);  // Calculate input end wrt offset.
   1111   __ SmiUntag(edi);
   1112   __ add(ebx, edi);  // Calculate input start wrt offset.
   1113 
   1114   // ebx: start index of the input string
   1115   // esi: end index of the input string
   1116   Label setup_two_byte, setup_rest;
   1117   __ test(ecx, ecx);
   1118   __ j(zero, &setup_two_byte, Label::kNear);
   1119   __ SmiUntag(esi);
   1120   __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
   1121   __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
   1122   __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
   1123   __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
   1124   __ jmp(&setup_rest, Label::kNear);
   1125 
   1126   __ bind(&setup_two_byte);
   1127   STATIC_ASSERT(kSmiTag == 0);
   1128   STATIC_ASSERT(kSmiTagSize == 1);  // esi is smi (powered by 2).
   1129   __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
   1130   __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
   1131   __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
   1132   __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
   1133 
   1134   __ bind(&setup_rest);
   1135 
   1136   // Locate the code entry and call it.
   1137   __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
   1138   __ call(edx);
   1139 
   1140   // Drop arguments and come back to JS mode.
   1141   __ LeaveApiExitFrame(true);
   1142 
   1143   // Check the result.
   1144   Label success;
   1145   __ cmp(eax, 1);
   1146   // We expect exactly one result since we force the called regexp to behave
   1147   // as non-global.
   1148   __ j(equal, &success);
   1149   Label failure;
   1150   __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
   1151   __ j(equal, &failure);
   1152   __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
   1153   // If not exception it can only be retry. Handle that in the runtime system.
   1154   __ j(not_equal, &runtime);
   1155   // Result must now be exception. If there is no pending exception already a
   1156   // stack overflow (on the backtrack stack) was detected in RegExp code but
   1157   // haven't created the exception yet. Handle that in the runtime system.
   1158   // TODO(592): Rerunning the RegExp to get the stack overflow exception.
   1159   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
   1160                                       isolate());
   1161   __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
   1162   __ mov(eax, Operand::StaticVariable(pending_exception));
   1163   __ cmp(edx, eax);
   1164   __ j(equal, &runtime);
   1165 
   1166   // For exception, throw the exception again.
   1167   __ TailCallRuntime(Runtime::kRegExpExecReThrow);
   1168 
   1169   __ bind(&failure);
   1170   // For failure to match, return null.
   1171   __ mov(eax, factory->null_value());
   1172   __ ret(4 * kPointerSize);
   1173 
   1174   // Load RegExp data.
   1175   __ bind(&success);
   1176   __ mov(eax, Operand(esp, kJSRegExpOffset));
   1177   __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
   1178   __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
   1179   // Calculate number of capture registers (number_of_captures + 1) * 2.
   1180   STATIC_ASSERT(kSmiTag == 0);
   1181   STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
   1182   __ add(edx, Immediate(2));  // edx was a smi.
   1183 
   1184   // edx: Number of capture registers
   1185   // Load last_match_info which is still known to be a fast case JSArray.
   1186   // Check that the fourth object is a JSArray object.
   1187   __ mov(eax, Operand(esp, kLastMatchInfoOffset));
   1188   __ JumpIfSmi(eax, &runtime);
   1189   __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
   1190   __ j(not_equal, &runtime);
   1191   // Check that the JSArray is in fast case.
   1192   __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
   1193   __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
   1194   __ cmp(eax, factory->fixed_array_map());
   1195   __ j(not_equal, &runtime);
   1196   // Check that the last match info has space for the capture registers and the
   1197   // additional information.
   1198   __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
   1199   __ SmiUntag(eax);
   1200   __ sub(eax, Immediate(RegExpImpl::kLastMatchOverhead));
   1201   __ cmp(edx, eax);
   1202   __ j(greater, &runtime);
   1203 
   1204   // ebx: last_match_info backing store (FixedArray)
   1205   // edx: number of capture registers
   1206   // Store the capture count.
   1207   __ SmiTag(edx);  // Number of capture registers to smi.
   1208   __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
   1209   __ SmiUntag(edx);  // Number of capture registers back from smi.
   1210   // Store last subject and last input.
   1211   __ mov(eax, Operand(esp, kSubjectOffset));
   1212   __ mov(ecx, eax);
   1213   __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
   1214   __ RecordWriteField(ebx, RegExpImpl::kLastSubjectOffset, eax, edi,
   1215                       kDontSaveFPRegs);
   1216   __ mov(eax, ecx);
   1217   __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
   1218   __ RecordWriteField(ebx, RegExpImpl::kLastInputOffset, eax, edi,
   1219                       kDontSaveFPRegs);
   1220 
   1221   // Get the static offsets vector filled by the native regexp code.
   1222   ExternalReference address_of_static_offsets_vector =
   1223       ExternalReference::address_of_static_offsets_vector(isolate());
   1224   __ mov(ecx, Immediate(address_of_static_offsets_vector));
   1225 
   1226   // ebx: last_match_info backing store (FixedArray)
   1227   // ecx: offsets vector
   1228   // edx: number of capture registers
   1229   Label next_capture, done;
   1230   // Capture register counter starts from number of capture registers and
   1231   // counts down until wraping after zero.
   1232   __ bind(&next_capture);
   1233   __ sub(edx, Immediate(1));
   1234   __ j(negative, &done, Label::kNear);
   1235   // Read the value from the static offsets vector buffer.
   1236   __ mov(edi, Operand(ecx, edx, times_int_size, 0));
   1237   __ SmiTag(edi);
   1238   // Store the smi value in the last match info.
   1239   __ mov(FieldOperand(ebx,
   1240                       edx,
   1241                       times_pointer_size,
   1242                       RegExpImpl::kFirstCaptureOffset),
   1243                       edi);
   1244   __ jmp(&next_capture);
   1245   __ bind(&done);
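          // C-style sketch of the copy loop above (descriptive placeholder
          // names, not real V8 identifiers):
          //   for (int i = register_count - 1; i >= 0; --i)
          //     last_match_info[kFirstCaptureIndex + i] =
          //         Smi::FromInt(offsets_vector[i]);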
   1246 
   1247   // Return last match info.
   1248   __ mov(eax, Operand(esp, kLastMatchInfoOffset));
   1249   __ ret(4 * kPointerSize);
   1250 
   1251   // Do the runtime call to execute the regexp.
   1252   __ bind(&runtime);
   1253   __ TailCallRuntime(Runtime::kRegExpExec);
   1254 
   1255   // Deferred code for string handling.
   1256   // (7) Not a long external string?  If yes, go to (10).
   1257   __ bind(&not_seq_nor_cons);
   1258   // Compare flags are still set from (3).
   1259   __ j(greater, &not_long_external, Label::kNear);  // Go to (10).
   1260 
   1261   // (8) External string.  Short external strings have been ruled out.
   1262   __ bind(&external_string);
   1263   // Reload instance type.
   1264   __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
   1265   __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
   1266   if (FLAG_debug_code) {
   1267     // Assert that we do not have a cons or slice (indirect strings) here.
   1268     // Sequential strings have already been ruled out.
   1269     __ test_b(ebx, kIsIndirectStringMask);
   1270     __ Assert(zero, kExternalStringExpectedButNotFound);
   1271   }
   1272   __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
   1273   // Move the pointer so that offset-wise, it looks like a sequential string.
   1274   STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
   1275   __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
   1276   STATIC_ASSERT(kTwoByteStringTag == 0);
   1277   // (8a) Is the external string one byte?  If yes, go to (6).
   1278   __ test_b(ebx, kStringEncodingMask);
   1279   __ j(not_zero, &seq_one_byte_string);  // Goto (6).
   1280 
   1281   // eax: sequential subject string (or look-alike, external string)
   1282   // edx: original subject string
   1283   // ecx: RegExp data (FixedArray)
   1284   // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
   1285   __ bind(&seq_two_byte_string);
   1286   // Load previous index and check range before edx is overwritten.  We have
   1287   // to use edx instead of eax here because eax might have only been made to
   1288   // look like a sequential string when it is actually an external string.
   1289   __ mov(ebx, Operand(esp, kPreviousIndexOffset));
   1290   __ JumpIfNotSmi(ebx, &runtime);
   1291   __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
   1292   __ j(above_equal, &runtime);
   1293   __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
   1294   __ Move(ecx, Immediate(0));  // Type is two byte.
   1295   __ jmp(&check_code);  // Go to (E).
   1296 
   1297   // (10) Not a string or a short external string?  If yes, bail out to runtime.
   1298   __ bind(&not_long_external);
   1299   // Catch non-string subject or short external string.
   1300   STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
   1301   __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
   1302   __ j(not_zero, &runtime);
   1303 
   1304   // (11) Sliced string.  Replace subject with parent.  Go to (5a).
   1305   // Load offset into edi and replace subject string with parent.
   1306   __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
   1307   __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
   1308   __ jmp(&check_underlying);  // Go to (5a).
   1309 #endif  // V8_INTERPRETED_REGEXP
   1310 }
   1311 
   1312 
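        // Returns the comparison result that makes |cc| evaluate to false,
        // e.g. NegativeComparisonResult(less) == GREATER.  Used below to
        // produce the fixed answer for comparisons that must fail, such as
        // undefined OP undefined.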
   1313 static int NegativeComparisonResult(Condition cc) {
   1314   DCHECK(cc != equal);
   1315   DCHECK((cc == less) || (cc == less_equal)
   1316       || (cc == greater) || (cc == greater_equal));
   1317   return (cc == greater || cc == greater_equal) ? LESS : GREATER;
   1318 }
   1319 
   1320 
   1321 static void CheckInputType(MacroAssembler* masm, Register input,
   1322                            CompareICState::State expected, Label* fail) {
   1323   Label ok;
   1324   if (expected == CompareICState::SMI) {
   1325     __ JumpIfNotSmi(input, fail);
   1326   } else if (expected == CompareICState::NUMBER) {
   1327     __ JumpIfSmi(input, &ok);
   1328     __ cmp(FieldOperand(input, HeapObject::kMapOffset),
   1329            Immediate(masm->isolate()->factory()->heap_number_map()));
   1330     __ j(not_equal, fail);
   1331   }
   1332   // We could be strict about internalized/non-internalized here, but as long as
   1333   // hydrogen doesn't care, the stub doesn't have to care either.
   1334   __ bind(&ok);
   1335 }
   1336 
   1337 
   1338 static void BranchIfNotInternalizedString(MacroAssembler* masm,
   1339                                           Label* label,
   1340                                           Register object,
   1341                                           Register scratch) {
   1342   __ JumpIfSmi(object, label);
   1343   __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
   1344   __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
   1345   STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
   1346   __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
   1347   __ j(not_zero, label);
   1348 }
   1349 
   1350 
   1351 void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
   1352   Label runtime_call, check_unequal_objects;
   1353   Condition cc = GetCondition();
   1354 
   1355   Label miss;
   1356   CheckInputType(masm, edx, left(), &miss);
   1357   CheckInputType(masm, eax, right(), &miss);
   1358 
   1359   // Compare two smis.
   1360   Label non_smi, smi_done;
   1361   __ mov(ecx, edx);
   1362   __ or_(ecx, eax);
   1363   __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
   1364   __ sub(edx, eax);  // Return on the result of the subtraction.
   1365   __ j(no_overflow, &smi_done, Label::kNear);
   1366   __ not_(edx);  // Correct sign in case of overflow. edx is never 0 here.
   1367   __ bind(&smi_done);
   1368   __ mov(eax, edx);
   1369   __ ret(0);
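          // Worked example of the overflow fix above: if edx holds the most
          // negative smi and eax is positive, edx - eax overflows to a
          // positive value; NOT (== -x - 1) flips the sign bit and cannot
          // produce zero, restoring a correctly signed nonzero result.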
   1370   __ bind(&non_smi);
   1371 
   1372   // NOTICE! This code is only reached after a smi-fast-case check, so
   1373   // it is certain that at least one operand isn't a smi.
   1374 
   1375   // Identical objects can be compared fast, but there are some tricky cases
   1376   // for NaN and undefined.
   1377   Label generic_heap_number_comparison;
   1378   {
   1379     Label not_identical;
   1380     __ cmp(eax, edx);
   1381     __ j(not_equal, &not_identical);
   1382 
   1383     if (cc != equal) {
   1384       // Check for undefined.  undefined OP undefined is false even though
   1385       // undefined == undefined.
   1386       __ cmp(edx, isolate()->factory()->undefined_value());
   1387       if (is_strong(strength())) {
   1388         // In strong mode, this comparison must throw, so call the runtime.
   1389         __ j(equal, &runtime_call, Label::kFar);
   1390       } else {
   1391         Label check_for_nan;
   1392         __ j(not_equal, &check_for_nan, Label::kNear);
   1393         __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
   1394         __ ret(0);
   1395         __ bind(&check_for_nan);
   1396       }
   1397     }
   1398 
   1399     // Test for NaN. Compare heap numbers in a general way,
   1400     // to handle NaNs correctly.
   1401     __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
   1402            Immediate(isolate()->factory()->heap_number_map()));
   1403     __ j(equal, &generic_heap_number_comparison, Label::kNear);
   1404     if (cc != equal) {
   1405       __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
   1406       __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
   1407       // Call runtime on identical JSObjects.  Otherwise return equal.
   1408       __ cmpb(ecx, static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE));
   1409       __ j(above_equal, &runtime_call, Label::kFar);
   1410       // Call runtime on identical symbols since we need to throw a TypeError.
   1411       __ cmpb(ecx, static_cast<uint8_t>(SYMBOL_TYPE));
   1412       __ j(equal, &runtime_call, Label::kFar);
   1413       // Call runtime on identical SIMD values since we must throw a TypeError.
   1414       __ cmpb(ecx, static_cast<uint8_t>(SIMD128_VALUE_TYPE));
   1415       __ j(equal, &runtime_call, Label::kFar);
   1416       if (is_strong(strength())) {
   1417         // We have already tested for smis and heap numbers, so if both
   1418         // arguments are not strings we must proceed to the slow case.
   1419         __ test(ecx, Immediate(kIsNotStringMask));
   1420         __ j(not_zero, &runtime_call, Label::kFar);
   1421       }
   1422     }
   1423     __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   1424     __ ret(0);
   1425 
   1426 
   1427     __ bind(&not_identical);
   1428   }
   1429 
   1430   // Strict equality can quickly decide whether objects are equal.
   1431   // Non-strict object equality is slower, so it is handled later in the stub.
   1432   if (cc == equal && strict()) {
   1433     Label slow;  // Fallthrough label.
   1434     Label not_smis;
   1435     // If we're doing a strict equality comparison, we don't have to do
   1436     // type conversion, so we generate code to do fast comparison for objects
   1437     // and oddballs. Non-smi numbers and strings still go through the usual
   1438     // slow-case code.
   1439     // If either is a Smi (we know that not both are), then they can only
   1440     // be equal if the other is a HeapNumber. If so, use the slow case.
   1441     STATIC_ASSERT(kSmiTag == 0);
   1442     DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
   1443     __ mov(ecx, Immediate(kSmiTagMask));
   1444     __ and_(ecx, eax);
   1445     __ test(ecx, edx);
   1446     __ j(not_zero, &not_smis, Label::kNear);
   1447     // One operand is a smi.
   1448 
   1449     // Check whether the non-smi is a heap number.
   1450     STATIC_ASSERT(kSmiTagMask == 1);
   1451     // ecx still holds eax & kSmiTagMask, which is either zero or one.
   1452     __ sub(ecx, Immediate(0x01));
   1453     __ mov(ebx, edx);
   1454     __ xor_(ebx, eax);
   1455     __ and_(ebx, ecx);  // ebx holds either 0 or eax ^ edx.
   1456     __ xor_(ebx, eax);
   1457     // if eax was smi, ebx is now edx, else eax.
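            // Branchless-select sketch of the sequence above, with
            // mask == (eax & kSmiTagMask) - 1 (all ones iff eax is a smi):
            //   ebx = ((edx ^ eax) & mask) ^ eax
            // picks the non-smi operand without a branch.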
   1458 
   1459     // Check if the non-smi operand is a heap number.
   1460     __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
   1461            Immediate(isolate()->factory()->heap_number_map()));
   1462     // If heap number, handle it in the slow case.
   1463     __ j(equal, &slow, Label::kNear);
   1464     // Return non-equal (ebx is not zero)
   1465     __ mov(eax, ebx);
   1466     __ ret(0);
   1467 
   1468     __ bind(&not_smis);
   1469     // If either operand is a JSObject or an oddball value, then they are not
   1470     // equal since their pointers are different.
   1471     // There is no test for undetectability in strict equality.
   1472 
   1473     // Get the type of the first operand.
   1474     // If the first object is a JS object, we have done pointer comparison.
   1475     Label first_non_object;
   1476     STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
   1477     __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
   1478     __ j(below, &first_non_object, Label::kNear);
   1479 
   1480     // Return non-zero (eax is not zero)
   1481     Label return_not_equal;
   1482     STATIC_ASSERT(kHeapObjectTag != 0);
   1483     __ bind(&return_not_equal);
   1484     __ ret(0);
   1485 
   1486     __ bind(&first_non_object);
   1487     // Check for oddballs: true, false, null, undefined.
   1488     __ CmpInstanceType(ecx, ODDBALL_TYPE);
   1489     __ j(equal, &return_not_equal);
   1490 
   1491     __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
   1492     __ j(above_equal, &return_not_equal);
   1493 
   1494     // Check for oddballs: true, false, null, undefined.
   1495     __ CmpInstanceType(ecx, ODDBALL_TYPE);
   1496     __ j(equal, &return_not_equal);
   1497 
   1498     // Fall through to the general case.
   1499     __ bind(&slow);
   1500   }
   1501 
   1502   // Generate the number comparison code.
   1503   Label non_number_comparison;
   1504   Label unordered;
   1505   __ bind(&generic_heap_number_comparison);
   1506   FloatingPointHelper::CheckFloatOperands(
   1507       masm, &non_number_comparison, ebx);
   1508   FloatingPointHelper::LoadFloatOperand(masm, eax);
   1509   FloatingPointHelper::LoadFloatOperand(masm, edx);
   1510   __ FCmp();
   1511 
   1512   // Don't base result on EFLAGS when a NaN is involved.
   1513   __ j(parity_even, &unordered, Label::kNear);
   1514 
   1515   Label below_label, above_label;
   1516   // Return a result of -1, 0, or 1, based on EFLAGS.
   1517   __ j(below, &below_label, Label::kNear);
   1518   __ j(above, &above_label, Label::kNear);
   1519 
   1520   __ Move(eax, Immediate(0));
   1521   __ ret(0);
   1522 
   1523   __ bind(&below_label);
   1524   __ mov(eax, Immediate(Smi::FromInt(-1)));
   1525   __ ret(0);
   1526 
   1527   __ bind(&above_label);
   1528   __ mov(eax, Immediate(Smi::FromInt(1)));
   1529   __ ret(0);
   1530 
   1531   // If one of the numbers was NaN, then the result is always false.
   1532   // The cc is never not-equal.
   1533   __ bind(&unordered);
   1534   DCHECK(cc != not_equal);
   1535   if (cc == less || cc == less_equal) {
   1536     __ mov(eax, Immediate(Smi::FromInt(1)));
   1537   } else {
   1538     __ mov(eax, Immediate(Smi::FromInt(-1)));
   1539   }
   1540   __ ret(0);
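          // Rationale: every relational comparison involving NaN must be
          // false, so return the value that makes |cc| fail: +1 when cc is
          // less/less_equal, -1 when it is greater/greater_equal.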
   1541 
   1542   // The number comparison code did not provide a valid result.
   1543   __ bind(&non_number_comparison);
   1544 
   1545   // Fast negative check for internalized-to-internalized equality.
   1546   Label check_for_strings;
   1547   if (cc == equal) {
   1548     BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
   1549     BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);
   1550 
   1551     // We've already checked for object identity, so if both operands
   1552     // are internalized they aren't equal. Register eax already holds a
   1553     // non-zero value, which indicates not equal, so just return.
   1554     __ ret(0);
   1555   }
   1556 
   1557   __ bind(&check_for_strings);
   1558 
   1559   __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx,
   1560                                            &check_unequal_objects);
   1561 
   1562   // Inline comparison of one-byte strings.
   1563   if (cc == equal) {
   1564     StringHelper::GenerateFlatOneByteStringEquals(masm, edx, eax, ecx, ebx);
   1565   } else {
   1566     StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
   1567                                                     edi);
   1568   }
   1569 #ifdef DEBUG
   1570   __ Abort(kUnexpectedFallThroughFromStringComparison);
   1571 #endif
   1572 
   1573   __ bind(&check_unequal_objects);
   1574   if (cc == equal && !strict()) {
   1575     // Non-strict equality.  Objects are unequal if
   1576     // they are both JSObjects and not undetectable,
   1577     // and their pointers are different.
   1578     Label return_unequal;
   1579     // At most one is a smi, so we can test for smi by adding the two.
   1580     // A smi plus a heap object has the low bit set, a heap object plus
   1581     // a heap object has the low bit clear.
   1582     STATIC_ASSERT(kSmiTag == 0);
   1583     STATIC_ASSERT(kSmiTagMask == 1);
   1584     __ lea(ecx, Operand(eax, edx, times_1, 0));
   1585     __ test(ecx, Immediate(kSmiTagMask));
   1586     __ j(not_zero, &runtime_call, Label::kNear);
   1587     __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
   1588     __ j(below, &runtime_call, Label::kNear);
   1589     __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ebx);
   1590     __ j(below, &runtime_call, Label::kNear);
   1591     // We do not bail out after this point.  Both are JSObjects, and
   1592     // they are equal if and only if both are undetectable.
   1593     // The and of the undetectable flags is 1 if and only if they are equal.
   1594     __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
   1595               1 << Map::kIsUndetectable);
   1596     __ j(zero, &return_unequal, Label::kNear);
   1597     __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
   1598               1 << Map::kIsUndetectable);
   1599     __ j(zero, &return_unequal, Label::kNear);
   1600     // The objects are both undetectable, so they both compare as the value
   1601     // undefined, and are equal.
   1602     __ Move(eax, Immediate(EQUAL));
   1603     __ bind(&return_unequal);
   1604     // Return non-equal by returning the non-zero object pointer in eax,
   1605     // or return equal if we fell through to here.
   1606     __ ret(0);  // eax, edx were pushed
   1607   }
   1608   __ bind(&runtime_call);
   1609 
   1610   // Push arguments below the return address.
   1611   __ pop(ecx);
   1612   __ push(edx);
   1613   __ push(eax);
   1614 
   1615   // Figure out which native to call and setup the arguments.
   1616   if (cc == equal) {
   1617     __ push(ecx);
   1618     __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals);
   1619   } else {
   1620     __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
   1621 
   1622     // Restore return address on the stack.
   1623     __ push(ecx);
   1624 
   1625     // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
   1626     // tagged as a small integer.
   1627     __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong
   1628                                              : Runtime::kCompare);
   1629   }
   1630 
   1631   __ bind(&miss);
   1632   GenerateMiss(masm);
   1633 }
   1634 
   1635 
   1636 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
   1637   // eax : number of arguments to the construct function
   1638   // ebx : feedback vector
   1639   // edx : slot in feedback vector (Smi)
   1640   // edi : the function to call
   1641 
   1642   {
   1643     FrameScope scope(masm, StackFrame::INTERNAL);
   1644 
   1645     // Number-of-arguments register must be smi-tagged to call out.
   1646     __ SmiTag(eax);
   1647     __ push(eax);
   1648     __ push(edi);
   1649     __ push(edx);
   1650     __ push(ebx);
   1651 
   1652     __ CallStub(stub);
   1653 
   1654     __ pop(ebx);
   1655     __ pop(edx);
   1656     __ pop(edi);
   1657     __ pop(eax);
   1658     __ SmiUntag(eax);
   1659   }
   1660 }
   1661 
   1662 
   1663 static void GenerateRecordCallTarget(MacroAssembler* masm) {
   1664   // Cache the called function in a feedback vector slot.  Cache states
   1665   // are uninitialized, monomorphic (indicated by a JSFunction), and
   1666   // megamorphic.
   1667   // eax : number of arguments to the construct function
   1668   // ebx : feedback vector
   1669   // edx : slot in feedback vector (Smi)
   1670   // edi : the function to call
   1671   Isolate* isolate = masm->isolate();
   1672   Label initialize, done, miss, megamorphic, not_array_function;
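          // Informal state diagram for the feedback slot, as implemented
          // below:
          //   uninitialized symbol -> WeakCell(function)  (regular callee)
          //   uninitialized symbol -> AllocationSite      (callee is Array())
          //   any mismatch         -> megamorphic symbol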
   1673 
   1674   // Load the cache state into ecx.
   1675   __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
   1676                            FixedArray::kHeaderSize));
   1677 
   1678   // A monomorphic cache hit or an already megamorphic state: invoke the
   1679   // function without changing the state.
   1680   // We don't know if ecx is a WeakCell or a Symbol, but it's harmless to read
   1681   // at this position in a symbol (see static asserts in
   1682   // type-feedback-vector.h).
   1683   Label check_allocation_site;
   1684   __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
   1685   __ j(equal, &done, Label::kFar);
   1686   __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
   1687   __ j(equal, &done, Label::kFar);
   1688   __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
   1689                  Heap::kWeakCellMapRootIndex);
   1690   __ j(not_equal, &check_allocation_site);
   1691 
   1692   // If the weak cell is cleared, we have a new chance to become monomorphic.
   1693   __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
   1694   __ jmp(&megamorphic);
   1695 
   1696   __ bind(&check_allocation_site);
   1697   // If we came here, we need to see if we are the array function.
   1698   // If we didn't have a matching function, and we didn't find the megamorphic
   1699   // sentinel, then the slot holds either some other function or an
   1700   // AllocationSite.
   1701   __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
   1702   __ j(not_equal, &miss);
   1703 
   1704   // Make sure the function is the Array() function
   1705   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
   1706   __ cmp(edi, ecx);
   1707   __ j(not_equal, &megamorphic);
   1708   __ jmp(&done, Label::kFar);
   1709 
   1710   __ bind(&miss);
   1711 
   1712   // A monomorphic miss (i.e., the cache is not uninitialized) goes
   1713   // megamorphic.
   1714   __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex);
   1715   __ j(equal, &initialize);
   1716   // MegamorphicSentinel is an immortal immovable object (undefined) so no
   1717   // write-barrier is needed.
   1718   __ bind(&megamorphic);
   1719   __ mov(
   1720       FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
   1721       Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
   1722   __ jmp(&done, Label::kFar);
   1723 
   1724   // An uninitialized cache is patched with a weak cell holding the function,
   1725   // or with an AllocationSite if the function is the Array() constructor.
   1726   __ bind(&initialize);
   1727   // Make sure the function is the Array() function
   1728   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
   1729   __ cmp(edi, ecx);
   1730   __ j(not_equal, &not_array_function);
   1731 
   1732   // The target function is the Array constructor,
   1733   // Create an AllocationSite if we don't already have it, store it in the
   1734   // slot.
   1735   CreateAllocationSiteStub create_stub(isolate);
   1736   CallStubInRecordCallTarget(masm, &create_stub);
   1737   __ jmp(&done);
   1738 
   1739   __ bind(&not_array_function);
   1740   CreateWeakCellStub weak_cell_stub(isolate);
   1741   CallStubInRecordCallTarget(masm, &weak_cell_stub);
   1742   __ bind(&done);
   1743 }
   1744 
   1745 
   1746 void CallConstructStub::Generate(MacroAssembler* masm) {
   1747   // eax : number of arguments
   1748   // ebx : feedback vector
   1749   // edx : slot in feedback vector (Smi, for RecordCallTarget)
   1750   // edi : constructor function
   1751 
   1752   Label non_function;
   1753   // Check that function is not a smi.
   1754   __ JumpIfSmi(edi, &non_function);
   1755   // Check that function is a JSFunction.
   1756   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
   1757   __ j(not_equal, &non_function);
   1758 
   1759   GenerateRecordCallTarget(masm);
   1760 
   1761   Label feedback_register_initialized;
   1762   // Put the AllocationSite from the feedback vector into ebx, or undefined.
   1763   __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
   1764                            FixedArray::kHeaderSize));
   1765   Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
   1766   __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
   1767   __ j(equal, &feedback_register_initialized);
   1768   __ mov(ebx, isolate()->factory()->undefined_value());
   1769   __ bind(&feedback_register_initialized);
   1770 
   1771   __ AssertUndefinedOrAllocationSite(ebx);
   1772 
   1773   // Pass new target to construct stub.
   1774   __ mov(edx, edi);
   1775 
   1776   // Tail call to the function-specific construct stub (still in the caller
   1777   // context at this point).
   1778   __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
   1779   __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
   1780   __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
   1781   __ jmp(ecx);
   1782 
   1783   __ bind(&non_function);
   1784   __ mov(edx, edi);
   1785   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
   1786 }
   1787 
   1788 
   1789 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   1790   // edi - function
   1791   // edx - slot id
   1792   // ebx - vector
   1793   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
   1794   __ cmp(edi, ecx);
   1795   __ j(not_equal, miss);
   1796 
   1797   __ mov(eax, arg_count());
   1798   // Reload ecx.
   1799   __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
   1800                            FixedArray::kHeaderSize));
   1801 
   1802   // Increment the call count for monomorphic function calls.
   1803   __ add(FieldOperand(ebx, edx, times_half_pointer_size,
   1804                       FixedArray::kHeaderSize + kPointerSize),
   1805          Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));
   1806 
   1807   __ mov(ebx, ecx);
   1808   __ mov(edx, edi);
   1809   ArrayConstructorStub stub(masm->isolate(), arg_count());
   1810   __ TailCallStub(&stub);
   1811 
   1812   // Unreachable.
   1813 }
   1814 
   1815 
   1816 void CallICStub::Generate(MacroAssembler* masm) {
   1817   // edi - function
   1818   // edx - slot id
   1819   // ebx - vector
   1820   Isolate* isolate = masm->isolate();
   1821   Label extra_checks_or_miss, call, call_function;
   1822   int argc = arg_count();
   1823   ParameterCount actual(argc);
   1824 
   1825   // The checks. First, does edi match the recorded monomorphic target?
   1826   __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
   1827                            FixedArray::kHeaderSize));
   1828 
   1829   // We don't know that we have a weak cell. We might have a private symbol
   1830   // or an AllocationSite, but the memory is safe to examine.
   1831   // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
   1832   // FixedArray.
   1833   // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
   1834   // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
   1835   // computed, meaning that it can't appear to be a pointer. If the low bit is
   1836   // 0, then hash is computed, but the 0 bit prevents the field from appearing
   1837   // to be a pointer.
   1838   STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
   1839   STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
   1840                     WeakCell::kValueOffset &&
   1841                 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
   1842 
   1843   __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
   1844   __ j(not_equal, &extra_checks_or_miss);
   1845 
   1846   // The compare above could have been a SMI/SMI comparison. Guard against this
   1847   // convincing us that we have a monomorphic JSFunction.
   1848   __ JumpIfSmi(edi, &extra_checks_or_miss);
   1849 
   1850   // Increment the call count for monomorphic function calls.
   1851   __ add(FieldOperand(ebx, edx, times_half_pointer_size,
   1852                       FixedArray::kHeaderSize + kPointerSize),
   1853          Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));
   1854 
   1855   __ bind(&call_function);
   1856   __ Set(eax, argc);
   1857   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode()),
   1858           RelocInfo::CODE_TARGET);
   1859 
   1860   __ bind(&extra_checks_or_miss);
   1861   Label uninitialized, miss, not_allocation_site;
   1862 
   1863   __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
   1864   __ j(equal, &call);
   1865 
   1866   // Check if we have an allocation site.
   1867   __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
   1868                  Heap::kAllocationSiteMapRootIndex);
   1869   __ j(not_equal, &not_allocation_site);
   1870 
   1871   // We have an allocation site.
   1872   HandleArrayCase(masm, &miss);
   1873 
   1874   __ bind(&not_allocation_site);
   1875 
   1876   // The following cases attempt to handle MISS cases without going to the
   1877   // runtime.
   1878   if (FLAG_trace_ic) {
   1879     __ jmp(&miss);
   1880   }
   1881 
   1882   __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate)));
   1883   __ j(equal, &uninitialized);
   1884 
   1885   // We are going megamorphic. If the feedback is a JSFunction, it is fine
   1886   // to handle it here. More complex cases are dealt with in the runtime.
   1887   __ AssertNotSmi(ecx);
   1888   __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx);
   1889   __ j(not_equal, &miss);
   1890   __ mov(
   1891       FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
   1892       Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
   1893 
   1894   __ bind(&call);
   1895   __ Set(eax, argc);
   1896   __ Jump(masm->isolate()->builtins()->Call(convert_mode()),
   1897           RelocInfo::CODE_TARGET);
   1898 
   1899   __ bind(&uninitialized);
   1900 
   1901   // We are going monomorphic, provided we actually have a JSFunction.
   1902   __ JumpIfSmi(edi, &miss);
   1903 
   1904   // Go to the miss case if we do not have a function.
   1905   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
   1906   __ j(not_equal, &miss);
   1907 
   1908   // Make sure the function is not the Array() function, which requires special
   1909   // behavior on MISS.
   1910   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
   1911   __ cmp(edi, ecx);
   1912   __ j(equal, &miss);
   1913 
   1914   // Make sure the function belongs to the same native context.
   1915   __ mov(ecx, FieldOperand(edi, JSFunction::kContextOffset));
   1916   __ mov(ecx, ContextOperand(ecx, Context::NATIVE_CONTEXT_INDEX));
   1917   __ cmp(ecx, NativeContextOperand());
   1918   __ j(not_equal, &miss);
   1919 
   1920   // Initialize the call counter.
   1921   __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
   1922                       FixedArray::kHeaderSize + kPointerSize),
   1923          Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));
   1924 
   1925   // Store the function. Use a stub since we need a frame for allocation.
   1926   // ebx - vector
   1927   // edx - slot
   1928   // edi - function
   1929   {
   1930     FrameScope scope(masm, StackFrame::INTERNAL);
   1931     CreateWeakCellStub create_stub(isolate);
   1932     __ push(edi);
   1933     __ CallStub(&create_stub);
   1934     __ pop(edi);
   1935   }
   1936 
   1937   __ jmp(&call_function);
   1938 
   1939   // We are here because tracing is on or we encountered a MISS case we can't
   1940   // handle here.
   1941   __ bind(&miss);
   1942   GenerateMiss(masm);
   1943 
   1944   __ jmp(&call);
   1945 
   1946   // Unreachable
   1947   __ int3();
   1948 }
   1949 
   1950 
   1951 void CallICStub::GenerateMiss(MacroAssembler* masm) {
   1952   FrameScope scope(masm, StackFrame::INTERNAL);
   1953 
   1954   // Push the function and feedback info.
   1955   __ push(edi);
   1956   __ push(ebx);
   1957   __ push(edx);
   1958 
   1959   // Call the entry.
   1960   __ CallRuntime(Runtime::kCallIC_Miss);
   1961 
   1962   // Move result to edi and exit the internal frame.
   1963   __ mov(edi, eax);
   1964 }
   1965 
   1966 
   1967 bool CEntryStub::NeedsImmovableCode() {
   1968   return false;
   1969 }
   1970 
   1971 
   1972 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   1973   CEntryStub::GenerateAheadOfTime(isolate);
   1974   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   1975   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   1976   // It is important that the store buffer overflow stubs are generated first.
   1977   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   1978   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
   1979   CreateWeakCellStub::GenerateAheadOfTime(isolate);
   1980   BinaryOpICStub::GenerateAheadOfTime(isolate);
   1981   BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
   1982   StoreFastElementStub::GenerateAheadOfTime(isolate);
   1983   TypeofStub::GenerateAheadOfTime(isolate);
   1984 }
   1985 
   1986 
   1987 void CodeStub::GenerateFPStubs(Isolate* isolate) {
   1988   CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
   1989   // Stubs might already be in the snapshot; detect that and don't
   1990   // regenerate, as that would corrupt the code stub initialization state.
   1991   Code* save_doubles_code;
   1992   if (!save_doubles.FindCodeInCache(&save_doubles_code)) {
   1993     save_doubles_code = *(save_doubles.GetCode());
   1994   }
   1995   isolate->set_fp_stubs_generated(true);
   1996 }
   1997 
   1998 
   1999 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
   2000   CEntryStub stub(isolate, 1, kDontSaveFPRegs);
   2001   stub.GetCode();
   2002 }
   2003 
   2004 
   2005 void CEntryStub::Generate(MacroAssembler* masm) {
   2006   // eax: number of arguments including receiver
   2007   // ebx: pointer to C function  (C callee-saved)
   2008   // ebp: frame pointer  (restored after C call)
   2009   // esp: stack pointer  (restored after C call)
   2010   // esi: current context (C callee-saved)
   2011   // edi: JS function of the caller (C callee-saved)
   2012   //
   2013   // If argv_in_register():
   2014   // ecx: pointer to the first argument
   2015 
   2016   ProfileEntryHookStub::MaybeCallEntryHook(masm);
   2017 
   2018   // Enter the exit frame that transitions from JavaScript to C++.
   2019   if (argv_in_register()) {
   2020     DCHECK(!save_doubles());
   2021     __ EnterApiExitFrame(3);
   2022 
   2023     // Move argc and argv into the correct registers.
   2024     __ mov(esi, ecx);
   2025     __ mov(edi, eax);
   2026   } else {
   2027     __ EnterExitFrame(save_doubles());
   2028   }
   2029 
   2030   // ebx: pointer to C function  (C callee-saved)
   2031   // ebp: frame pointer  (restored after C call)
   2032   // esp: stack pointer  (restored after C call)
   2033   // edi: number of arguments including receiver  (C callee-saved)
   2034   // esi: pointer to the first argument (C callee-saved)
   2035 
   2036   // Result returned in eax, or eax+edx if result size is 2.
   2037 
   2038   // Check stack alignment.
   2039   if (FLAG_debug_code) {
   2040     __ CheckStackAlignment();
   2041   }
   2042 
   2043   // Call C function.
   2044   __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
   2045   __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
   2046   __ mov(Operand(esp, 2 * kPointerSize),
   2047          Immediate(ExternalReference::isolate_address(isolate())));
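          // The callee is invoked with the C signature (sketch):
          //   Object* fn(int argc, Object** argv, Isolate* isolate);
          // matching the three outgoing stack slots filled above.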
   2048   __ call(ebx);
   2049   // Result is in eax or edx:eax - do not destroy these registers!
   2050 
   2051   // Check result for exception sentinel.
   2052   Label exception_returned;
   2053   __ cmp(eax, isolate()->factory()->exception());
   2054   __ j(equal, &exception_returned);
   2055 
   2056   // Check that there is no pending exception, otherwise we
   2057   // should have returned the exception sentinel.
   2058   if (FLAG_debug_code) {
   2059     __ push(edx);
   2060     __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
   2061     Label okay;
   2062     ExternalReference pending_exception_address(
   2063         Isolate::kPendingExceptionAddress, isolate());
   2064     __ cmp(edx, Operand::StaticVariable(pending_exception_address));
   2065     // Cannot use check here, as it attempts to generate a call into the runtime.
   2066     __ j(equal, &okay, Label::kNear);
   2067     __ int3();
   2068     __ bind(&okay);
   2069     __ pop(edx);
   2070   }
   2071 
   2072   // Exit the JavaScript to C++ exit frame.
   2073   __ LeaveExitFrame(save_doubles(), !argv_in_register());
   2074   __ ret(0);
   2075 
   2076   // Handling of exception.
   2077   __ bind(&exception_returned);
   2078 
   2079   ExternalReference pending_handler_context_address(
   2080       Isolate::kPendingHandlerContextAddress, isolate());
   2081   ExternalReference pending_handler_code_address(
   2082       Isolate::kPendingHandlerCodeAddress, isolate());
   2083   ExternalReference pending_handler_offset_address(
   2084       Isolate::kPendingHandlerOffsetAddress, isolate());
   2085   ExternalReference pending_handler_fp_address(
   2086       Isolate::kPendingHandlerFPAddress, isolate());
   2087   ExternalReference pending_handler_sp_address(
   2088       Isolate::kPendingHandlerSPAddress, isolate());
   2089 
   2090   // Ask the runtime for help to determine the handler. This will set eax to
   2091   // contain the current pending exception, don't clobber it.
   2092   ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
   2093                                  isolate());
   2094   {
   2095     FrameScope scope(masm, StackFrame::MANUAL);
   2096     __ PrepareCallCFunction(3, eax);
   2097     __ mov(Operand(esp, 0 * kPointerSize), Immediate(0));  // argc.
   2098     __ mov(Operand(esp, 1 * kPointerSize), Immediate(0));  // argv.
   2099     __ mov(Operand(esp, 2 * kPointerSize),
   2100            Immediate(ExternalReference::isolate_address(isolate())));
   2101     __ CallCFunction(find_handler, 3);
   2102   }
   2103 
   2104   // Retrieve the handler context, SP and FP.
   2105   __ mov(esi, Operand::StaticVariable(pending_handler_context_address));
   2106   __ mov(esp, Operand::StaticVariable(pending_handler_sp_address));
   2107   __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address));
   2108 
   2109   // If the handler is a JS frame, restore the context to the frame. Note that
   2110   // the context will be set to (esi == 0) for non-JS frames.
   2111   Label skip;
   2112   __ test(esi, esi);
   2113   __ j(zero, &skip, Label::kNear);
   2114   __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
   2115   __ bind(&skip);
   2116 
   2117   // Compute the handler entry address and jump to it.
   2118   __ mov(edi, Operand::StaticVariable(pending_handler_code_address));
   2119   __ mov(edx, Operand::StaticVariable(pending_handler_offset_address));
   2120   __ lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
   2121   __ jmp(edi);
   2122 }
   2123 
   2124 
   2125 void JSEntryStub::Generate(MacroAssembler* masm) {
   2126   Label invoke, handler_entry, exit;
   2127   Label not_outermost_js, not_outermost_js_2;
   2128 
   2129   ProfileEntryHookStub::MaybeCallEntryHook(masm);
   2130 
   2131   // Set up frame.
   2132   __ push(ebp);
   2133   __ mov(ebp, esp);
   2134 
   2135   // Push marker in two places.
   2136   int marker = type();
   2137   __ push(Immediate(Smi::FromInt(marker)));  // context slot
   2138   __ push(Immediate(Smi::FromInt(marker)));  // function slot
   2139   // Save callee-saved registers (C calling conventions).
   2140   __ push(edi);
   2141   __ push(esi);
   2142   __ push(ebx);
   2143 
   2144   // Save copies of the top frame descriptor on the stack.
   2145   ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
   2146   __ push(Operand::StaticVariable(c_entry_fp));
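          // Frame layout at this point (sketch, ebp-relative):
          //   ebp- 4 : entry-frame marker (context slot)
          //   ebp- 8 : entry-frame marker (function slot)
          //   ebp-12 : saved edi    ebp-16 : saved esi    ebp-20 : saved ebx
          //   ebp-24 : saved c_entry_fp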
   2147 
   2148   // If this is the outermost JS call, set js_entry_sp value.
   2149   ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
   2150   __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
   2151   __ j(not_equal, &not_outermost_js, Label::kNear);
   2152   __ mov(Operand::StaticVariable(js_entry_sp), ebp);
   2153   __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
   2154   __ jmp(&invoke, Label::kNear);
   2155   __ bind(&not_outermost_js);
   2156   __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
   2157 
   2158   // Jump to a faked try block that does the invoke, with a faked catch
   2159   // block that sets the pending exception.
   2160   __ jmp(&invoke);
   2161   __ bind(&handler_entry);
   2162   handler_offset_ = handler_entry.pos();
   2163   // Caught exception: Store result (exception) in the pending exception
   2164   // field in the JSEnv and return a failure sentinel.
   2165   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
   2166                                       isolate());
   2167   __ mov(Operand::StaticVariable(pending_exception), eax);
   2168   __ mov(eax, Immediate(isolate()->factory()->exception()));
   2169   __ jmp(&exit);
   2170 
   2171   // Invoke: Link this frame into the handler chain.
   2172   __ bind(&invoke);
   2173   __ PushStackHandler();
   2174 
   2175   // Clear any pending exceptions.
   2176   __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
   2177   __ mov(Operand::StaticVariable(pending_exception), edx);
   2178 
   2179   // Fake a receiver (NULL).
   2180   __ push(Immediate(0));  // receiver
   2181 
   2182   // Invoke the function by calling through the JS entry trampoline builtin
   2183   // and pop the faked function when we return. Notice that we cannot store a
   2184   // reference to the trampoline code directly in this stub, because the
   2185   // builtin stubs may not have been generated yet.
   2186   if (type() == StackFrame::ENTRY_CONSTRUCT) {
   2187     ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
   2188                                       isolate());
   2189     __ mov(edx, Immediate(construct_entry));
   2190   } else {
   2191     ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
   2192     __ mov(edx, Immediate(entry));
   2193   }
   2194   __ mov(edx, Operand(edx, 0));  // deref address
   2195   __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
   2196   __ call(edx);
   2197 
   2198   // Unlink this frame from the handler chain.
   2199   __ PopStackHandler();
   2200 
   2201   __ bind(&exit);
   2202   // Check if the current stack frame is marked as the outermost JS frame.
   2203   __ pop(ebx);
   2204   __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
   2205   __ j(not_equal, &not_outermost_js_2);
   2206   __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
   2207   __ bind(&not_outermost_js_2);
   2208 
   2209   // Restore the top frame descriptor from the stack.
   2210   __ pop(Operand::StaticVariable(ExternalReference(
   2211       Isolate::kCEntryFPAddress, isolate())));
   2212 
   2213   // Restore callee-saved registers (C calling conventions).
   2214   __ pop(ebx);
   2215   __ pop(esi);
   2216   __ pop(edi);
   2217   __ add(esp, Immediate(2 * kPointerSize));  // remove markers
   2218 
   2219   // Restore frame pointer and return.
   2220   __ pop(ebp);
   2221   __ ret(0);
   2222 }
   2223 
   2224 
   2225 void InstanceOfStub::Generate(MacroAssembler* masm) {
   2226   Register const object = edx;                       // Object (lhs).
   2227   Register const function = eax;                     // Function (rhs).
   2228   Register const object_map = ecx;                   // Map of {object}.
   2229   Register const function_map = ebx;                 // Map of {function}.
   2230   Register const function_prototype = function_map;  // Prototype of {function}.
   2231   Register const scratch = edi;
   2232 
   2233   DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
   2234   DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
   2235 
   2236   // Check if {object} is a smi.
   2237   Label object_is_smi;
   2238   __ JumpIfSmi(object, &object_is_smi, Label::kNear);
   2239 
   2240   // Look up the {function} and the {object} map in the global instanceof cache.
   2241   // Note: This is safe because we clear the global instanceof cache whenever
   2242   // we change the prototype of any object.
   2243   Label fast_case, slow_case;
   2244   __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
   2245   __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
   2246   __ j(not_equal, &fast_case, Label::kNear);
   2247   __ CompareRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);
   2248   __ j(not_equal, &fast_case, Label::kNear);
   2249   __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
   2250   __ ret(0);
   2251 
   2252   // If {object} is a smi we can safely return false if {function} is a JS
   2253   // function, otherwise we have to miss to the runtime and throw an exception.
   2254   __ bind(&object_is_smi);
   2255   __ JumpIfSmi(function, &slow_case);
   2256   __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
   2257   __ j(not_equal, &slow_case);
   2258   __ LoadRoot(eax, Heap::kFalseValueRootIndex);
   2259   __ ret(0);
   2260 
   2261   // Fast-case: The {function} must be a valid JSFunction.
   2262   __ bind(&fast_case);
   2263   __ JumpIfSmi(function, &slow_case);
   2264   __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
   2265   __ j(not_equal, &slow_case);
   2266 
   2267   // Ensure that {function} has an instance prototype.
   2268   __ test_b(FieldOperand(function_map, Map::kBitFieldOffset),
   2269             static_cast<uint8_t>(1 << Map::kHasNonInstancePrototype));
   2270   __ j(not_zero, &slow_case);
   2271 
   2272   // Get the "prototype" (or initial map) of the {function}.
   2273   __ mov(function_prototype,
   2274          FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
   2275   __ AssertNotSmi(function_prototype);
   2276 
   2277   // Resolve the prototype if the {function} has an initial map.  Afterwards the
   2278   // {function_prototype} will be either the JSReceiver prototype object or the
   2279   // hole value, which means that no instances of the {function} were created so
   2280   // far and hence we should return false.
   2281   Label function_prototype_valid;
   2282   Register const function_prototype_map = scratch;
   2283   __ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map);
   2284   __ j(not_equal, &function_prototype_valid, Label::kNear);
   2285   __ mov(function_prototype,
   2286          FieldOperand(function_prototype, Map::kPrototypeOffset));
   2287   __ bind(&function_prototype_valid);
   2288   __ AssertNotSmi(function_prototype);
   2289 
   2290   // Update the global instanceof cache with the current {object} map and
   2291   // {function}.  The cached answer will be set when it is known below.
   2292   __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
   2293   __ StoreRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);
   2294 
   2295   // Loop through the prototype chain looking for the {function} prototype.
   2296   // Assume true, and change to false if not found.
   2297   Label done, loop, fast_runtime_fallback;
   2298   __ mov(eax, isolate()->factory()->true_value());
   2299   __ bind(&loop);
   2300 
   2301   // Check if the object needs to be access checked.
   2302   __ test_b(FieldOperand(object_map, Map::kBitFieldOffset),
   2303             1 << Map::kIsAccessCheckNeeded);
   2304   __ j(not_zero, &fast_runtime_fallback, Label::kNear);
   2305   // Check if the current object is a Proxy.
   2306   __ CmpInstanceType(object_map, JS_PROXY_TYPE);
   2307   __ j(equal, &fast_runtime_fallback, Label::kNear);
   2308 
   2309   __ mov(object, FieldOperand(object_map, Map::kPrototypeOffset));
   2310   __ cmp(object, function_prototype);
   2311   __ j(equal, &done, Label::kNear);
   2312   __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
   2313   __ cmp(object, isolate()->factory()->null_value());
   2314   __ j(not_equal, &loop);
   2315   __ mov(eax, isolate()->factory()->false_value());
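          // Sketch of the walk above (placeholder names, not the real V8
          // API):
          //   for (;;) {
          //     if (needs_access_check(map) || is_proxy(map)) goto fallback;
          //     object = map->prototype;
          //     if (object == function_prototype) return true;
          //     map = object->map;
          //     if (object == null) return false;
          //   }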
   2316 
   2317   __ bind(&done);
   2318   __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
   2319   __ ret(0);
   2320 
   2321   // Found Proxy or access check needed: Call the runtime.
   2322   __ bind(&fast_runtime_fallback);
   2323   __ PopReturnAddressTo(scratch);
   2324   __ Push(object);
   2325   __ Push(function_prototype);
   2326   __ PushReturnAddressFrom(scratch);
   2327   // Invalidate the instanceof cache.
   2328   __ Move(eax, Immediate(Smi::FromInt(0)));
   2329   __ StoreRoot(eax, scratch, Heap::kInstanceofCacheFunctionRootIndex);
   2330   __ TailCallRuntime(Runtime::kHasInPrototypeChain);
   2331 
   2332   // Slow-case: Call the %InstanceOf runtime function.
   2333   __ bind(&slow_case);
   2334   __ PopReturnAddressTo(scratch);
   2335   __ Push(object);
   2336   __ Push(function);
   2337   __ PushReturnAddressFrom(scratch);
   2338   __ TailCallRuntime(Runtime::kInstanceOf);
   2339 }
   2340 
   2341 
   2342 // -------------------------------------------------------------------------
   2343 // StringCharCodeAtGenerator
   2344 
   2345 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   2346   // If the receiver is a smi trigger the non-string case.
   2347   if (check_mode_ == RECEIVER_IS_UNKNOWN) {
   2348     __ JumpIfSmi(object_, receiver_not_string_);
   2349 
   2350     // Fetch the instance type of the receiver into result register.
   2351     __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
   2352     __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
   2353     // If the receiver is not a string trigger the non-string case.
   2354     __ test(result_, Immediate(kIsNotStringMask));
   2355     __ j(not_zero, receiver_not_string_);
   2356   }
   2357 
   2358   // If the index is non-smi trigger the non-smi case.
   2359   __ JumpIfNotSmi(index_, &index_not_smi_);
   2360   __ bind(&got_smi_index_);
   2361 
   2362   // Check for index out of range.
   2363   __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
   2364   __ j(above_equal, index_out_of_range_);
   2365 
   2366   __ SmiUntag(index_);
   2367 
   2368   Factory* factory = masm->isolate()->factory();
   2369   StringCharLoadGenerator::Generate(
   2370       masm, factory, object_, index_, result_, &call_runtime_);
   2371 
   2372   __ SmiTag(result_);
   2373   __ bind(&exit_);
   2374 }
   2375 
   2376 
   2377 void StringCharCodeAtGenerator::GenerateSlow(
   2378     MacroAssembler* masm, EmbedMode embed_mode,
   2379     const RuntimeCallHelper& call_helper) {
   2380   __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
   2381 
   2382   // Index is not a smi.
   2383   __ bind(&index_not_smi_);
   2384   // If index is a heap number, try converting it to an integer.
   2385   __ CheckMap(index_,
   2386               masm->isolate()->factory()->heap_number_map(),
   2387               index_not_number_,
   2388               DONT_DO_SMI_CHECK);
   2389   call_helper.BeforeCall(masm);
   2390   if (embed_mode == PART_OF_IC_HANDLER) {
   2391     __ push(LoadWithVectorDescriptor::VectorRegister());
   2392     __ push(LoadDescriptor::SlotRegister());
   2393   }
   2394   __ push(object_);
   2395   __ push(index_);  // Consumed by runtime conversion function.
   2396   if (index_flags_ == STRING_INDEX_IS_NUMBER) {
   2397     __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
   2398   } else {
   2399     DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
   2400     // NumberToSmi discards numbers that are not exact integers.
   2401     __ CallRuntime(Runtime::kNumberToSmi);
   2402   }
   2403   if (!index_.is(eax)) {
   2404     // Save the conversion result before the pop instructions below
   2405     // have a chance to overwrite it.
   2406     __ mov(index_, eax);
   2407   }
   2408   __ pop(object_);
   2409   if (embed_mode == PART_OF_IC_HANDLER) {
   2410     __ pop(LoadDescriptor::SlotRegister());
   2411     __ pop(LoadWithVectorDescriptor::VectorRegister());
   2412   }
   2413   // Reload the instance type.
   2414   __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
   2415   __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
   2416   call_helper.AfterCall(masm);
   2417   // If index is still not a smi, it must be out of range.
   2418   STATIC_ASSERT(kSmiTag == 0);
   2419   __ JumpIfNotSmi(index_, index_out_of_range_);
   2420   // Otherwise, return to the fast path.
   2421   __ jmp(&got_smi_index_);
   2422 
   2423   // Call runtime. We get here when the receiver is a string and the
   2424   // index is a number, but the code for getting the actual character
   2425   // is too complex (e.g., when the string needs to be flattened).
   2426   __ bind(&call_runtime_);
   2427   call_helper.BeforeCall(masm);
   2428   __ push(object_);
   2429   __ SmiTag(index_);
   2430   __ push(index_);
   2431   __ CallRuntime(Runtime::kStringCharCodeAtRT);
   2432   if (!result_.is(eax)) {
   2433     __ mov(result_, eax);
   2434   }
   2435   call_helper.AfterCall(masm);
   2436   __ jmp(&exit_);
   2437 
   2438   __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
   2439 }
   2440 
   2441 
   2442 // -------------------------------------------------------------------------
   2443 // StringCharFromCodeGenerator
   2444 
   2445 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
   2446   // Fast case of Heap::LookupSingleCharacterStringFromCode.
   2447   STATIC_ASSERT(kSmiTag == 0);
   2448   STATIC_ASSERT(kSmiShiftSize == 0);
   2449   DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCodeU + 1));
   2450   __ test(code_, Immediate(kSmiTagMask |
   2451                            ((~String::kMaxOneByteCharCodeU) << kSmiTagSize)));
   2452   __ j(not_zero, &slow_case_);
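          // The combined test above checks, in one instruction, that code_ is
          // a smi whose untagged value fits in one byte (32-bit smi encoding,
          // value << 1):
          //   (code & kSmiTagMask) == 0 && (code >> 1) <= kMaxOneByteCharCodeU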
   2453 
   2454   Factory* factory = masm->isolate()->factory();
   2455   __ Move(result_, Immediate(factory->single_character_string_cache()));
   2456   STATIC_ASSERT(kSmiTag == 0);
   2457   STATIC_ASSERT(kSmiTagSize == 1);
   2458   STATIC_ASSERT(kSmiShiftSize == 0);
   2459   // At this point the code register contains a smi-tagged one-byte char code.
   2460   __ mov(result_, FieldOperand(result_,
   2461                                code_, times_half_pointer_size,
   2462                                FixedArray::kHeaderSize));
   2463   __ cmp(result_, factory->undefined_value());
   2464   __ j(equal, &slow_case_);
   2465   __ bind(&exit_);
   2466 }
   2467 
   2468 
   2469 void StringCharFromCodeGenerator::GenerateSlow(
   2470     MacroAssembler* masm,
   2471     const RuntimeCallHelper& call_helper) {
   2472   __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
   2473 
   2474   __ bind(&slow_case_);
   2475   call_helper.BeforeCall(masm);
   2476   __ push(code_);
   2477   __ CallRuntime(Runtime::kStringCharFromCode);
   2478   if (!result_.is(eax)) {
   2479     __ mov(result_, eax);
   2480   }
   2481   call_helper.AfterCall(masm);
   2482   __ jmp(&exit_);
   2483 
   2484   __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
   2485 }
   2486 
   2487 
   2488 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
   2489                                           Register dest,
   2490                                           Register src,
   2491                                           Register count,
   2492                                           Register scratch,
   2493                                           String::Encoding encoding) {
   2494   DCHECK(!scratch.is(dest));
   2495   DCHECK(!scratch.is(src));
   2496   DCHECK(!scratch.is(count));
   2497 
   2498   // Nothing to do for zero characters.
   2499   Label done;
   2500   __ test(count, count);
   2501   __ j(zero, &done);
   2502 
   2503   // Make count the number of bytes to copy.
   2504   if (encoding == String::TWO_BYTE_ENCODING) {
   2505     __ shl(count, 1);
   2506   }
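          // The loop below is a simple byte-wise copy; a C sketch:
          //   while (count-- != 0) *dest++ = *src++;  // count is in bytes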
   2507 
   2508   Label loop;
   2509   __ bind(&loop);
   2510   __ mov_b(scratch, Operand(src, 0));
   2511   __ mov_b(Operand(dest, 0), scratch);
   2512   __ inc(src);
   2513   __ inc(dest);
   2514   __ dec(count);
   2515   __ j(not_zero, &loop);
   2516 
   2517   __ bind(&done);
   2518 }
   2519 
   2520 
   2521 void SubStringStub::Generate(MacroAssembler* masm) {
   2522   Label runtime;
   2523 
   2524   // Stack frame on entry.
   2525   //  esp[0]: return address
   2526   //  esp[4]: to
   2527   //  esp[8]: from
   2528   //  esp[12]: string
   2529 
   2530   // Make sure first argument is a string.
   2531   __ mov(eax, Operand(esp, 3 * kPointerSize));
   2532   STATIC_ASSERT(kSmiTag == 0);
   2533   __ JumpIfSmi(eax, &runtime);
   2534   Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
   2535   __ j(NegateCondition(is_string), &runtime);
   2536 
   2537   // eax: string
   2538   // ebx: instance type
   2539 
   2540   // Calculate length of sub string using the smi values.
   2541   __ mov(ecx, Operand(esp, 1 * kPointerSize));  // To index.
   2542   __ JumpIfNotSmi(ecx, &runtime);
   2543   __ mov(edx, Operand(esp, 2 * kPointerSize));  // From index.
   2544   __ JumpIfNotSmi(edx, &runtime);
   2545   __ sub(ecx, edx);
   2546   __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
   2547   Label not_original_string;
   2548   // Shorter than original string's length: an actual substring.
   2549   __ j(below, &not_original_string, Label::kNear);
   2550   // Longer than original string's length or negative: unsafe arguments.
   2551   __ j(above, &runtime);
   2552   // Return original string.
   2553   Counters* counters = isolate()->counters();
   2554   __ IncrementCounter(counters->sub_string_native(), 1);
   2555   __ ret(3 * kPointerSize);
   2556   __ bind(&not_original_string);
   2557 
   2558   Label single_char;
   2559   __ cmp(ecx, Immediate(Smi::FromInt(1)));
   2560   __ j(equal, &single_char);
   2561 
   2562   // eax: string
   2563   // ebx: instance type
   2564   // ecx: sub string length (smi)
   2565   // edx: from index (smi)
   2566   // Deal with different string types: update the index if necessary
   2567   // and put the underlying string into edi.
   2568   Label underlying_unpacked, sliced_string, seq_or_external_string;
   2569   // If the string is not indirect, it can only be sequential or external.
   2570   STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
   2571   STATIC_ASSERT(kIsIndirectStringMask != 0);
   2572   __ test(ebx, Immediate(kIsIndirectStringMask));
   2573   __ j(zero, &seq_or_external_string, Label::kNear);
   2574 
   2575   Factory* factory = isolate()->factory();
   2576   __ test(ebx, Immediate(kSlicedNotConsMask));
   2577   __ j(not_zero, &sliced_string, Label::kNear);
   2578   // Cons string.  Check whether it is flat, then fetch first part.
   2579   // Flat cons strings have an empty second part.
   2580   __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
   2581          factory->empty_string());
   2582   __ j(not_equal, &runtime);
   2583   __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
   2584   // Update instance type.
   2585   __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
   2586   __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
   2587   __ jmp(&underlying_unpacked, Label::kNear);
   2588 
   2589   __ bind(&sliced_string);
   2590   // Sliced string.  Fetch parent and adjust start index by offset.
   2591   __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
   2592   __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
   2593   // Update instance type.
   2594   __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
   2595   __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
   2596   __ jmp(&underlying_unpacked, Label::kNear);
   2597 
   2598   __ bind(&seq_or_external_string);
   2599   // Sequential or external string.  Just move string to the expected register.
   2600   __ mov(edi, eax);
   2601 
   2602   __ bind(&underlying_unpacked);
   2603 
   2604   if (FLAG_string_slices) {
   2605     Label copy_routine;
   2606     // edi: underlying subject string
   2607     // ebx: instance type of underlying subject string
   2608     // edx: adjusted start index (smi)
   2609     // ecx: length (smi)
   2610     __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
   2611     // Short slice.  Copy instead of slicing.
   2612     __ j(less, &copy_routine);
   2613     // Allocate new sliced string.  At this point we do not reload the instance
   2614     // type including the string encoding because we simply rely on the info
   2615     // provided by the original string.  It does not matter if the original
   2616     // string's encoding is wrong because we always have to recheck the
   2617     // encoding of the newly created string's parent anyway (externalization).
   2618     Label two_byte_slice, set_slice_header;
   2619     STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
   2620     STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
   2621     __ test(ebx, Immediate(kStringEncodingMask));
   2622     __ j(zero, &two_byte_slice, Label::kNear);
   2623     __ AllocateOneByteSlicedString(eax, ebx, no_reg, &runtime);
   2624     __ jmp(&set_slice_header, Label::kNear);
   2625     __ bind(&two_byte_slice);
   2626     __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
   2627     __ bind(&set_slice_header);
   2628     __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
   2629     __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
   2630            Immediate(String::kEmptyHashField));
   2631     __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
   2632     __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
   2633     __ IncrementCounter(counters->sub_string_native(), 1);
   2634     __ ret(3 * kPointerSize);
   2635 
   2636     __ bind(&copy_routine);
   2637   }
   2638 
   2639   // edi: underlying subject string
   2640   // ebx: instance type of underlying subject string
   2641   // edx: adjusted start index (smi)
   2642   // ecx: length (smi)
   2643   // The subject string can only be an external or a sequential string of
   2644   // either encoding at this point.
   2645   Label two_byte_sequential, runtime_drop_two, sequential_string;
   2646   STATIC_ASSERT(kExternalStringTag != 0);
   2647   STATIC_ASSERT(kSeqStringTag == 0);
   2648   __ test_b(ebx, kExternalStringTag);
   2649   __ j(zero, &sequential_string);
   2650 
   2651   // Handle external string.
   2652   // Rule out short external strings.
   2653   STATIC_ASSERT(kShortExternalStringTag != 0);
   2654   __ test_b(ebx, kShortExternalStringMask);
   2655   __ j(not_zero, &runtime);
   2656   __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
   2657   // Move the pointer so that offset-wise, it looks like a sequential string.
   2658   STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
   2659   __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
   2660 
   2661   __ bind(&sequential_string);
   2662   // Stash away (adjusted) index and (underlying) string.
   2663   __ push(edx);
   2664   __ push(edi);
   2665   __ SmiUntag(ecx);
   2666   STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
   2667   __ test_b(ebx, kStringEncodingMask);
   2668   __ j(zero, &two_byte_sequential);
   2669 
   2670   // Sequential one byte string.  Allocate the result.
   2671   __ AllocateOneByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
   2672 
   2673   // eax: result string
   2674   // ecx: result string length
   2675   // Locate first character of result.
   2676   __ mov(edi, eax);
   2677   __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
   2678   // Load string argument and locate character of sub string start.
   2679   __ pop(edx);
   2680   __ pop(ebx);
   2681   __ SmiUntag(ebx);
   2682   __ lea(edx, FieldOperand(edx, ebx, times_1, SeqOneByteString::kHeaderSize));
   2683 
   2684   // eax: result string
   2685   // ecx: result length
   2686   // edi: first character of result
   2687   // edx: character of sub string start
   2688   StringHelper::GenerateCopyCharacters(
   2689       masm, edi, edx, ecx, ebx, String::ONE_BYTE_ENCODING);
   2690   __ IncrementCounter(counters->sub_string_native(), 1);
   2691   __ ret(3 * kPointerSize);
   2692 
   2693   __ bind(&two_byte_sequential);
   2694   // Sequential two-byte string.  Allocate the result.
   2695   __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
   2696 
   2697   // eax: result string
   2698   // ecx: result string length
   2699   // Locate first character of result.
   2700   __ mov(edi, eax);
   2701   __ add(edi,
   2702          Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
   2703   // Load string argument and locate character of sub string start.
   2704   __ pop(edx);
   2705   __ pop(ebx);
   2706   // As the from index is a smi, it is already multiplied by two, which
   2707   // matches the size of a two-byte character.
   2708   STATIC_ASSERT(kSmiTag == 0);
   2709   STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
   2710   __ lea(edx, FieldOperand(edx, ebx, times_1, SeqTwoByteString::kHeaderSize));
   2711 
   2712   // eax: result string
   2713   // ecx: result length
   2714   // edi: first character of result
   2715   // edx: character of sub string start
   2716   StringHelper::GenerateCopyCharacters(
   2717       masm, edi, edx, ecx, ebx, String::TWO_BYTE_ENCODING);
   2718   __ IncrementCounter(counters->sub_string_native(), 1);
   2719   __ ret(3 * kPointerSize);
   2720 
   2721   // Drop pushed values on the stack before tail call.
   2722   __ bind(&runtime_drop_two);
   2723   __ Drop(2);
   2724 
   2725   // Just jump to runtime to create the sub string.
   2726   __ bind(&runtime);
   2727   __ TailCallRuntime(Runtime::kSubString);
   2728 
   2729   __ bind(&single_char);
   2730   // eax: string
   2731   // ebx: instance type
   2732   // ecx: sub string length (smi)
   2733   // edx: from index (smi)
   2734   StringCharAtGenerator generator(eax, edx, ecx, eax, &runtime, &runtime,
   2735                                   &runtime, STRING_INDEX_IS_NUMBER,
   2736                                   RECEIVER_IS_STRING);
   2737   generator.GenerateFast(masm);
   2738   __ ret(3 * kPointerSize);
   2739   generator.SkipSlow(masm, &runtime);
   2740 }
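
        // Informal summary of the dispatch implemented above (a sketch, not
        // authoritative pseudocode):
        //
        //   if (to - from == string.length()) return string;    // whole string
        //   if (to - from == 1) return CharAt(string, from);    // single char
        //   (underlying, from) = unpack cons or sliced string;
        //   if (FLAG_string_slices && length >= SlicedString::kMinLength)
        //     return new SlicedString(underlying, from, length);
        //   copy the character range for sequential/external strings, or
        //   tail-call Runtime::kSubString (e.g. for short external strings).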
   2741 
   2742 
   2743 void ToNumberStub::Generate(MacroAssembler* masm) {
   2744   // The ToNumber stub takes one argument in eax.
   2745   Label not_smi;
   2746   __ JumpIfNotSmi(eax, &not_smi, Label::kNear);
   2747   __ Ret();
   2748   __ bind(&not_smi);
   2749 
   2750   Label not_heap_number;
   2751   __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
   2752   __ j(not_equal, &not_heap_number, Label::kNear);
   2753   __ Ret();
   2754   __ bind(&not_heap_number);
   2755 
   2756   Label not_string, slow_string;
   2757   __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
   2758   // eax: object
   2759   // edi: object map
   2760   __ j(above_equal, &not_string, Label::kNear);
   2761   // Check if string has a cached array index.
   2762   __ test(FieldOperand(eax, String::kHashFieldOffset),
   2763           Immediate(String::kContainsCachedArrayIndexMask));
   2764   __ j(not_zero, &slow_string, Label::kNear);
   2765   __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
   2766   __ IndexFromHash(eax, eax);
   2767   __ Ret();
   2768   __ bind(&slow_string);
   2769   __ pop(ecx);   // Pop return address.
   2770   __ push(eax);  // Push argument.
   2771   __ push(ecx);  // Push return address.
   2772   __ TailCallRuntime(Runtime::kStringToNumber);
   2773   __ bind(&not_string);
   2774 
   2775   Label not_oddball;
   2776   __ CmpInstanceType(edi, ODDBALL_TYPE);
   2777   __ j(not_equal, &not_oddball, Label::kNear);
   2778   __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
   2779   __ Ret();
   2780   __ bind(&not_oddball);
   2781 
   2782   __ pop(ecx);   // Pop return address.
   2783   __ push(eax);  // Push argument.
   2784   __ push(ecx);  // Push return address.
   2785   __ TailCallRuntime(Runtime::kToNumber);
   2786 }
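
        // The fast paths above, summarized as a sketch (the helper names here
        // are illustrative, not real V8 API):
        //
        //   if (x.IsSmi() || x.IsHeapNumber()) return x;
        //   if (x.IsString())
        //     return x.HasCachedArrayIndex() ? x.CachedArrayIndex()
        //                                    : Runtime_StringToNumber(x);
        //   if (x.IsOddball()) return x.to_number;
        //   return Runtime_ToNumber(x);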
   2787 
   2788 
   2789 void ToLengthStub::Generate(MacroAssembler* masm) {
   2790   // The ToLength stub takes one argument in eax.
   2791   Label not_smi, positive_smi;
   2792   __ JumpIfNotSmi(eax, &not_smi, Label::kNear);
   2793   STATIC_ASSERT(kSmiTag == 0);
   2794   __ test(eax, eax);
   2795   __ j(greater_equal, &positive_smi, Label::kNear);
   2796   __ xor_(eax, eax);
   2797   __ bind(&positive_smi);
   2798   __ Ret();
   2799   __ bind(&not_smi);
   2800 
   2801   __ pop(ecx);   // Pop return address.
   2802   __ push(eax);  // Push argument.
   2803   __ push(ecx);  // Push return address.
   2804   __ TailCallRuntime(Runtime::kToLength);
   2805 }
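
        // The smi fast path above just clamps negative values to zero; roughly
        // (sketch): int32_t ToLengthFast(int32_t smi) { return smi < 0 ? 0 : smi; }
        // Everything else is handed off to Runtime::kToLength.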
   2806 
   2807 
   2808 void ToStringStub::Generate(MacroAssembler* masm) {
   2809   // The ToString stub takes one argument in eax.
   2810   Label is_number;
   2811   __ JumpIfSmi(eax, &is_number, Label::kNear);
   2812 
   2813   Label not_string;
   2814   __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
   2815   // eax: receiver
   2816   // edi: receiver map
   2817   __ j(above_equal, &not_string, Label::kNear);
   2818   __ Ret();
   2819   __ bind(&not_string);
   2820 
   2821   Label not_heap_number;
   2822   __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
   2823   __ j(not_equal, &not_heap_number, Label::kNear);
   2824   __ bind(&is_number);
   2825   NumberToStringStub stub(isolate());
   2826   __ TailCallStub(&stub);
   2827   __ bind(&not_heap_number);
   2828 
   2829   Label not_oddball;
   2830   __ CmpInstanceType(edi, ODDBALL_TYPE);
   2831   __ j(not_equal, &not_oddball, Label::kNear);
   2832   __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
   2833   __ Ret();
   2834   __ bind(&not_oddball);
   2835 
   2836   __ pop(ecx);   // Pop return address.
   2837   __ push(eax);  // Push argument.
   2838   __ push(ecx);  // Push return address.
   2839   __ TailCallRuntime(Runtime::kToString);
   2840 }
   2841 
   2842 
   2843 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
   2844                                                    Register left,
   2845                                                    Register right,
   2846                                                    Register scratch1,
   2847                                                    Register scratch2) {
   2848   Register length = scratch1;
   2849 
   2850   // Compare lengths.
   2851   Label strings_not_equal, check_zero_length;
   2852   __ mov(length, FieldOperand(left, String::kLengthOffset));
   2853   __ cmp(length, FieldOperand(right, String::kLengthOffset));
   2854   __ j(equal, &check_zero_length, Label::kNear);
   2855   __ bind(&strings_not_equal);
   2856   __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
   2857   __ ret(0);
   2858 
   2859   // Check if the length is zero.
   2860   Label compare_chars;
   2861   __ bind(&check_zero_length);
   2862   STATIC_ASSERT(kSmiTag == 0);
   2863   __ test(length, length);
   2864   __ j(not_zero, &compare_chars, Label::kNear);
   2865   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   2866   __ ret(0);
   2867 
   2868   // Compare characters.
   2869   __ bind(&compare_chars);
   2870   GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
   2871                                   &strings_not_equal, Label::kNear);
   2872 
   2873   // Characters are equal.
   2874   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   2875   __ ret(0);
   2876 }
   2877 
   2878 
   2879 void StringHelper::GenerateCompareFlatOneByteStrings(
   2880     MacroAssembler* masm, Register left, Register right, Register scratch1,
   2881     Register scratch2, Register scratch3) {
   2882   Counters* counters = masm->isolate()->counters();
   2883   __ IncrementCounter(counters->string_compare_native(), 1);
   2884 
   2885   // Find minimum length.
   2886   Label left_shorter;
   2887   __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
   2888   __ mov(scratch3, scratch1);
   2889   __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
   2890 
   2891   Register length_delta = scratch3;
   2892 
   2893   __ j(less_equal, &left_shorter, Label::kNear);
   2894   // Right string is shorter. Change scratch1 to be length of right string.
   2895   __ sub(scratch1, length_delta);
   2896   __ bind(&left_shorter);
   2897 
   2898   Register min_length = scratch1;
   2899 
   2900   // If either length is zero, just compare lengths.
   2901   Label compare_lengths;
   2902   __ test(min_length, min_length);
   2903   __ j(zero, &compare_lengths, Label::kNear);
   2904 
   2905   // Compare characters.
   2906   Label result_not_equal;
   2907   GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
   2908                                   &result_not_equal, Label::kNear);
   2909 
   2910   // Compare lengths -  strings up to min-length are equal.
   2911   __ bind(&compare_lengths);
   2912   __ test(length_delta, length_delta);
   2913   Label length_not_equal;
   2914   __ j(not_zero, &length_not_equal, Label::kNear);
   2915 
   2916   // Result is EQUAL.
   2917   STATIC_ASSERT(EQUAL == 0);
   2918   STATIC_ASSERT(kSmiTag == 0);
   2919   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   2920   __ ret(0);
   2921 
   2922   Label result_greater;
   2923   Label result_less;
   2924   __ bind(&length_not_equal);
   2925   __ j(greater, &result_greater, Label::kNear);
   2926   __ jmp(&result_less, Label::kNear);
   2927   __ bind(&result_not_equal);
   2928   __ j(above, &result_greater, Label::kNear);
   2929   __ bind(&result_less);
   2930 
   2931   // Result is LESS.
   2932   __ Move(eax, Immediate(Smi::FromInt(LESS)));
   2933   __ ret(0);
   2934 
   2935   // Result is GREATER.
   2936   __ bind(&result_greater);
   2937   __ Move(eax, Immediate(Smi::FromInt(GREATER)));
   2938   __ ret(0);
   2939 }
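
        // The stub above mirrors a conventional three-way flat-string compare;
        // an informal C++ sketch (LESS/EQUAL/GREATER as in V8's enum):
        //
        //   int CompareSketch(const uint8_t* l, int llen,
        //                     const uint8_t* r, int rlen) {
        //     int min = llen < rlen ? llen : rlen;
        //     for (int i = 0; i < min; i++) {
        //       if (l[i] != r[i]) return l[i] < r[i] ? LESS : GREATER;
        //     }
        //     if (llen == rlen) return EQUAL;
        //     return llen < rlen ? LESS : GREATER;
        //   }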
   2940 
   2941 
   2942 void StringHelper::GenerateOneByteCharsCompareLoop(
   2943     MacroAssembler* masm, Register left, Register right, Register length,
   2944     Register scratch, Label* chars_not_equal,
   2945     Label::Distance chars_not_equal_near) {
   2946   // Change index to run from -length to -1 by adding length to string
   2947   // start. This means that loop ends when index reaches zero, which
   2948   // doesn't need an additional compare.
   2949   __ SmiUntag(length);
   2950   __ lea(left,
   2951          FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
   2952   __ lea(right,
   2953          FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
   2954   __ neg(length);
   2955   Register index = length;  // index = -length;
   2956 
   2957   // Compare loop.
   2958   Label loop;
   2959   __ bind(&loop);
   2960   __ mov_b(scratch, Operand(left, index, times_1, 0));
   2961   __ cmpb(scratch, Operand(right, index, times_1, 0));
   2962   __ j(not_equal, chars_not_equal, chars_not_equal_near);
   2963   __ inc(index);
   2964   __ j(not_zero, &loop);
   2965 }
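
        // The negative-index trick above removes an explicit bound check from
        // the loop: biasing both string pointers by length lets the INC of the
        // index double as the termination test. Sketch (untagged length > 0):
        //
        //   left += length; right += length;
        //   int index = -length;
        //   do {
        //     if (left[index] != right[index]) goto chars_not_equal;
        //   } while (++index != 0);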
   2966 
   2967 
   2968 void StringCompareStub::Generate(MacroAssembler* masm) {
   2969   // ----------- S t a t e -------------
   2970   //  -- edx    : left string
   2971   //  -- eax    : right string
   2972   //  -- esp[0] : return address
   2973   // -----------------------------------
   2974   __ AssertString(edx);
   2975   __ AssertString(eax);
   2976 
   2977   Label not_same;
   2978   __ cmp(edx, eax);
   2979   __ j(not_equal, &not_same, Label::kNear);
   2980   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   2981   __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
   2982   __ Ret();
   2983 
   2984   __ bind(&not_same);
   2985 
   2986   // Check that both objects are sequential one-byte strings.
   2987   Label runtime;
   2988   __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx, &runtime);
   2989 
   2990   // Compare flat one-byte strings.
   2991   __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
   2992   StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
   2993                                                   edi);
   2994 
   2995   // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
   2996   // tagged as a small integer.
   2997   __ bind(&runtime);
   2998   __ PopReturnAddressTo(ecx);
   2999   __ Push(edx);
   3000   __ Push(eax);
   3001   __ PushReturnAddressFrom(ecx);
   3002   __ TailCallRuntime(Runtime::kStringCompare);
   3003 }
   3004 
   3005 
   3006 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
   3007   // ----------- S t a t e -------------
   3008   //  -- edx    : left
   3009   //  -- eax    : right
   3010   //  -- esp[0] : return address
   3011   // -----------------------------------
   3012 
   3013   // Load ecx with the allocation site.  We stick an undefined dummy value here
   3014   // and replace it with the real allocation site later when we instantiate this
   3015   // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
   3016   __ mov(ecx, handle(isolate()->heap()->undefined_value()));
   3017 
   3018   // Make sure that we actually patched the allocation site.
   3019   if (FLAG_debug_code) {
   3020     __ test(ecx, Immediate(kSmiTagMask));
   3021     __ Assert(not_equal, kExpectedAllocationSite);
   3022     __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
   3023            isolate()->factory()->allocation_site_map());
   3024     __ Assert(equal, kExpectedAllocationSite);
   3025   }
   3026 
   3027   // Tail call into the stub that handles binary operations with allocation
   3028   // sites.
   3029   BinaryOpWithAllocationSiteStub stub(isolate(), state());
   3030   __ TailCallStub(&stub);
   3031 }
   3032 
   3033 
   3034 void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
   3035   DCHECK_EQ(CompareICState::BOOLEAN, state());
   3036   Label miss;
   3037   Label::Distance const miss_distance =
   3038       masm->emit_debug_code() ? Label::kFar : Label::kNear;
   3039 
   3040   __ JumpIfSmi(edx, &miss, miss_distance);
   3041   __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
   3042   __ JumpIfSmi(eax, &miss, miss_distance);
   3043   __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
   3044   __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
   3045   __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
   3046   if (op() != Token::EQ_STRICT && is_strong(strength())) {
   3047     __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
   3048   } else {
   3049     if (!Token::IsEqualityOp(op())) {
   3050       __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
   3051       __ AssertSmi(eax);
   3052       __ mov(edx, FieldOperand(edx, Oddball::kToNumberOffset));
   3053       __ AssertSmi(edx);
   3054       __ xchg(eax, edx);
   3055     }
   3056     __ sub(eax, edx);
   3057     __ Ret();
   3058   }
   3059 
   3060   __ bind(&miss);
   3061   GenerateMiss(masm);
   3062 }
   3063 
   3064 
   3065 void CompareICStub::GenerateSmis(MacroAssembler* masm) {
   3066   DCHECK(state() == CompareICState::SMI);
   3067   Label miss;
   3068   __ mov(ecx, edx);
   3069   __ or_(ecx, eax);
   3070   __ JumpIfNotSmi(ecx, &miss, Label::kNear);
   3071 
   3072   if (GetCondition() == equal) {
   3073     // For equality we do not care about the sign of the result.
   3074     __ sub(eax, edx);
   3075   } else {
   3076     Label done;
   3077     __ sub(edx, eax);
   3078     __ j(no_overflow, &done, Label::kNear);
   3079     // Correct sign of result in case of overflow.
   3080     __ not_(edx);
   3081     __ bind(&done);
   3082     __ mov(eax, edx);
   3083   }
   3084   __ ret(0);
   3085 
   3086   __ bind(&miss);
   3087   GenerateMiss(masm);
   3088 }
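
        // The non-equality path above computes left - right and fixes the sign
        // when the subtraction overflows; the same idea as a C++ sketch:
        //
        //   int32_t d;
        //   if (!base::bits::SignedSubOverflow32(left, right, &d)) return d;
        //   return ~d;  // overflow flipped the sign; NOT restores the order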
   3089 
   3090 
   3091 void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
   3092   DCHECK(state() == CompareICState::NUMBER);
   3093 
   3094   Label generic_stub, check_left;
   3095   Label unordered, maybe_undefined1, maybe_undefined2;
   3096   Label miss;
   3097 
   3098   if (left() == CompareICState::SMI) {
   3099     __ JumpIfNotSmi(edx, &miss);
   3100   }
   3101   if (right() == CompareICState::SMI) {
   3102     __ JumpIfNotSmi(eax, &miss);
   3103   }
   3104 
   3105   // Inline the double comparison and fall back to the general compare stub
   3106   // if NaN is involved or SSE2 or CMOV is unsupported.
   3107   __ JumpIfSmi(eax, &check_left, Label::kNear);
   3108   __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
   3109          isolate()->factory()->heap_number_map());
   3110   __ j(not_equal, &maybe_undefined1, Label::kNear);
   3111 
   3112   __ bind(&check_left);
   3113   __ JumpIfSmi(edx, &generic_stub, Label::kNear);
   3114   __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
   3115          isolate()->factory()->heap_number_map());
   3116   __ j(not_equal, &maybe_undefined2, Label::kNear);
   3117 
   3118   __ bind(&unordered);
   3119   __ bind(&generic_stub);
   3120   CompareICStub stub(isolate(), op(), strength(), CompareICState::GENERIC,
   3121                      CompareICState::GENERIC, CompareICState::GENERIC);
   3122   __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
   3123 
   3124   __ bind(&maybe_undefined1);
   3125   if (Token::IsOrderedRelationalCompareOp(op())) {
   3126     __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
   3127     __ j(not_equal, &miss);
   3128     __ JumpIfSmi(edx, &unordered);
   3129     __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
   3130     __ j(not_equal, &maybe_undefined2, Label::kNear);
   3131     __ jmp(&unordered);
   3132   }
   3133 
   3134   __ bind(&maybe_undefined2);
   3135   if (Token::IsOrderedRelationalCompareOp(op())) {
   3136     __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
   3137     __ j(equal, &unordered);
   3138   }
   3139 
   3140   __ bind(&miss);
   3141   GenerateMiss(masm);
   3142 }
   3143 
   3144 
   3145 void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
   3146   DCHECK(state() == CompareICState::INTERNALIZED_STRING);
   3147   DCHECK(GetCondition() == equal);
   3148 
   3149   // Registers containing left and right operands respectively.
   3150   Register left = edx;
   3151   Register right = eax;
   3152   Register tmp1 = ecx;
   3153   Register tmp2 = ebx;
   3154 
   3155   // Check that both operands are heap objects.
   3156   Label miss;
   3157   __ mov(tmp1, left);
   3158   STATIC_ASSERT(kSmiTag == 0);
   3159   __ and_(tmp1, right);
   3160   __ JumpIfSmi(tmp1, &miss, Label::kNear);
   3161 
   3162   // Check that both operands are internalized strings.
   3163   __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
   3164   __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
   3165   __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   3166   __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   3167   STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
   3168   __ or_(tmp1, tmp2);
   3169   __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
   3170   __ j(not_zero, &miss, Label::kNear);
   3171 
   3172   // Internalized strings are compared by identity.
   3173   Label done;
   3174   __ cmp(left, right);
   3175   // Make sure eax is non-zero. At this point input operands are
   3176   // guaranteed to be non-zero.
   3177   DCHECK(right.is(eax));
   3178   __ j(not_equal, &done, Label::kNear);
   3179   STATIC_ASSERT(EQUAL == 0);
   3180   STATIC_ASSERT(kSmiTag == 0);
   3181   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   3182   __ bind(&done);
   3183   __ ret(0);
   3184 
   3185   __ bind(&miss);
   3186   GenerateMiss(masm);
   3187 }
   3188 
   3189 
   3190 void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
   3191   DCHECK(state() == CompareICState::UNIQUE_NAME);
   3192   DCHECK(GetCondition() == equal);
   3193 
   3194   // Registers containing left and right operands respectively.
   3195   Register left = edx;
   3196   Register right = eax;
   3197   Register tmp1 = ecx;
   3198   Register tmp2 = ebx;
   3199 
   3200   // Check that both operands are heap objects.
   3201   Label miss;
   3202   __ mov(tmp1, left);
   3203   STATIC_ASSERT(kSmiTag == 0);
   3204   __ and_(tmp1, right);
   3205   __ JumpIfSmi(tmp1, &miss, Label::kNear);
   3206 
   3207   // Check that both operands are unique names. This leaves the instance
   3208   // types loaded in tmp1 and tmp2.
   3209   __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
   3210   __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
   3211   __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   3212   __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   3213 
   3214   __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
   3215   __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
   3216 
   3217   // Unique names are compared by identity.
   3218   Label done;
   3219   __ cmp(left, right);
   3220   // Make sure eax is non-zero. At this point input operands are
   3221   // guaranteed to be non-zero.
   3222   DCHECK(right.is(eax));
   3223   __ j(not_equal, &done, Label::kNear);
   3224   STATIC_ASSERT(EQUAL == 0);
   3225   STATIC_ASSERT(kSmiTag == 0);
   3226   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   3227   __ bind(&done);
   3228   __ ret(0);
   3229 
   3230   __ bind(&miss);
   3231   GenerateMiss(masm);
   3232 }
   3233 
   3234 
   3235 void CompareICStub::GenerateStrings(MacroAssembler* masm) {
   3236   DCHECK(state() == CompareICState::STRING);
   3237   Label miss;
   3238 
   3239   bool equality = Token::IsEqualityOp(op());
   3240 
   3241   // Registers containing left and right operands respectively.
   3242   Register left = edx;
   3243   Register right = eax;
   3244   Register tmp1 = ecx;
   3245   Register tmp2 = ebx;
   3246   Register tmp3 = edi;
   3247 
   3248   // Check that both operands are heap objects.
   3249   __ mov(tmp1, left);
   3250   STATIC_ASSERT(kSmiTag == 0);
   3251   __ and_(tmp1, right);
   3252   __ JumpIfSmi(tmp1, &miss);
   3253 
   3254   // Check that both operands are strings. This leaves the instance
   3255   // types loaded in tmp1 and tmp2.
   3256   __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
   3257   __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
   3258   __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   3259   __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   3260   __ mov(tmp3, tmp1);
   3261   STATIC_ASSERT(kNotStringTag != 0);
   3262   __ or_(tmp3, tmp2);
   3263   __ test(tmp3, Immediate(kIsNotStringMask));
   3264   __ j(not_zero, &miss);
   3265 
   3266   // Fast check for identical strings.
   3267   Label not_same;
   3268   __ cmp(left, right);
   3269   __ j(not_equal, &not_same, Label::kNear);
   3270   STATIC_ASSERT(EQUAL == 0);
   3271   STATIC_ASSERT(kSmiTag == 0);
   3272   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   3273   __ ret(0);
   3274 
   3275   // Handle not identical strings.
   3276   __ bind(&not_same);
   3277 
   3278   // Check that both strings are internalized. If they are, we're done
   3279   // because we already know they are not identical.  But in the case of
   3280   // non-equality compare, we still need to determine the order. We
   3281   // also know they are both strings.
   3282   if (equality) {
   3283     Label do_compare;
   3284     STATIC_ASSERT(kInternalizedTag == 0);
   3285     __ or_(tmp1, tmp2);
   3286     __ test(tmp1, Immediate(kIsNotInternalizedMask));
   3287     __ j(not_zero, &do_compare, Label::kNear);
   3288     // Make sure eax is non-zero. At this point input operands are
   3289     // guaranteed to be non-zero.
   3290     DCHECK(right.is(eax));
   3291     __ ret(0);
   3292     __ bind(&do_compare);
   3293   }
   3294 
   3295   // Check that both strings are sequential one-byte.
   3296   Label runtime;
   3297   __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);
   3298 
   3299   // Compare flat one byte strings. Returns when done.
   3300   if (equality) {
   3301     StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
   3302                                                   tmp2);
   3303   } else {
   3304     StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
   3305                                                     tmp2, tmp3);
   3306   }
   3307 
   3308   // Handle more complex cases in runtime.
   3309   __ bind(&runtime);
   3310   __ pop(tmp1);  // Return address.
   3311   __ push(left);
   3312   __ push(right);
   3313   __ push(tmp1);
   3314   if (equality) {
   3315     __ TailCallRuntime(Runtime::kStringEquals);
   3316   } else {
   3317     __ TailCallRuntime(Runtime::kStringCompare);
   3318   }
   3319 
   3320   __ bind(&miss);
   3321   GenerateMiss(masm);
   3322 }
   3323 
   3324 
   3325 void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
   3326   DCHECK_EQ(CompareICState::RECEIVER, state());
   3327   Label miss;
   3328   __ mov(ecx, edx);
   3329   __ and_(ecx, eax);
   3330   __ JumpIfSmi(ecx, &miss, Label::kNear);
   3331 
   3332   STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
   3333   __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
   3334   __ j(below, &miss, Label::kNear);
   3335   __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
   3336   __ j(below, &miss, Label::kNear);
   3337 
   3338   DCHECK_EQ(equal, GetCondition());
   3339   __ sub(eax, edx);
   3340   __ ret(0);
   3341 
   3342   __ bind(&miss);
   3343   GenerateMiss(masm);
   3344 }
   3345 
   3346 
   3347 void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
   3348   Label miss;
   3349   Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
   3350   __ mov(ecx, edx);
   3351   __ and_(ecx, eax);
   3352   __ JumpIfSmi(ecx, &miss, Label::kNear);
   3353 
   3354   __ GetWeakValue(edi, cell);
   3355   __ cmp(edi, FieldOperand(eax, HeapObject::kMapOffset));
   3356   __ j(not_equal, &miss, Label::kNear);
   3357   __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset));
   3358   __ j(not_equal, &miss, Label::kNear);
   3359 
   3360   if (Token::IsEqualityOp(op())) {
   3361     __ sub(eax, edx);
   3362     __ ret(0);
   3363   } else if (is_strong(strength())) {
   3364     __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
   3365   } else {
   3366     __ PopReturnAddressTo(ecx);
   3367     __ Push(edx);
   3368     __ Push(eax);
   3369     __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition()))));
   3370     __ PushReturnAddressFrom(ecx);
   3371     __ TailCallRuntime(Runtime::kCompare);
   3372   }
   3373 
   3374   __ bind(&miss);
   3375   GenerateMiss(masm);
   3376 }
   3377 
   3378 
   3379 void CompareICStub::GenerateMiss(MacroAssembler* masm) {
   3380   {
   3381     // Call the runtime system in a fresh internal frame.
   3382     FrameScope scope(masm, StackFrame::INTERNAL);
   3383     __ push(edx);  // Preserve edx and eax.
   3384     __ push(eax);
   3385     __ push(edx);  // And also use them as the arguments.
   3386     __ push(eax);
   3387     __ push(Immediate(Smi::FromInt(op())));
   3388     __ CallRuntime(Runtime::kCompareIC_Miss);
   3389     // Compute the entry point of the rewritten stub.
   3390     __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
   3391     __ pop(eax);
   3392     __ pop(edx);
   3393   }
   3394 
   3395   // Do a tail call to the rewritten stub.
   3396   __ jmp(edi);
   3397 }
   3398 
   3399 
   3400 // Helper function used to check that the dictionary doesn't contain
   3401 // the property. This function may return false negatives, so miss_label
   3402 // must always call a backup property check that is complete.
   3403 // This function is safe to call if the receiver has fast properties.
   3404 // Name must be a unique name and receiver must be a heap object.
   3405 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
   3406                                                       Label* miss,
   3407                                                       Label* done,
   3408                                                       Register properties,
   3409                                                       Handle<Name> name,
   3410                                                       Register r0) {
   3411   DCHECK(name->IsUniqueName());
   3412 
   3413   // If the names in the slots probed for the hash value (probes 1 to
   3414   // kProbes - 1) are not equal to the name, and the kProbes-th slot is not
   3415   // used (its name is the undefined value), the hash table is guaranteed
   3416   // not to contain the property. This holds even if some slots represent
   3417   // deleted properties (their names are the hole value).
   3418   for (int i = 0; i < kInlinedProbes; i++) {
   3419     // Compute the masked index: (hash + i + i * i) & mask.
   3420     Register index = r0;
   3421     // Capacity is smi 2^n.
   3422     __ mov(index, FieldOperand(properties, kCapacityOffset));
   3423     __ dec(index);
   3424     __ and_(index,
   3425             Immediate(Smi::FromInt(name->Hash() +
   3426                                    NameDictionary::GetProbeOffset(i))));
   3427 
   3428     // Scale the index by multiplying by the entry size.
   3429     STATIC_ASSERT(NameDictionary::kEntrySize == 3);
   3430     __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
   3431     Register entity_name = r0;
   3432     // Having undefined at this place means the name is not contained.
   3433     STATIC_ASSERT(kSmiTagSize == 1);
   3434     __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
   3435                                 kElementsStartOffset - kHeapObjectTag));
   3436     __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
   3437     __ j(equal, done);
   3438 
   3439     // Stop if found the property.
   3440     __ cmp(entity_name, Handle<Name>(name));
   3441     __ j(equal, miss);
   3442 
   3443     Label good;
   3444     // Check for the hole and skip.
   3445     __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
   3446     __ j(equal, &good, Label::kNear);
   3447 
   3448     // Check if the entry name is not a unique name.
   3449     __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
   3450     __ JumpIfNotUniqueNameInstanceType(
   3451         FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
   3452     __ bind(&good);
   3453   }
   3454 
   3455   NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
   3456                                 NEGATIVE_LOOKUP);
   3457   __ push(Immediate(Handle<Object>(name)));
   3458   __ push(Immediate(name->Hash()));
   3459   __ CallStub(&stub);
   3460   __ test(r0, r0);
   3461   __ j(not_zero, miss);
   3462   __ jmp(done);
   3463 }
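
        // Sketch of the quadratic probing performed above (names illustrative;
        // capacity is a power of two, so `& (capacity - 1)` is the mask):
        //
        //   for (int i = 0; i < kInlinedProbes; i++) {
        //     int entry = (hash + NameDictionary::GetProbeOffset(i)) &
        //                 (capacity - 1);                // entry size is 3 words
        //     Object* key = KeyAt(entry);
        //     if (key == undefined) return kNotFound;    // free slot: absent
        //     if (key == *name) return kFound;           // property exists
        //     // holes and other unique names: keep probing
        //   }
        //   // inconclusive: fall back to the out-of-line lookup stub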
   3464 
   3465 
   3466 // Probe the name dictionary in the |elements| register. Jump to the
   3467 // |done| label if a property with the given name is found, leaving the
   3468 // index into the dictionary in |r0|. Jump to the |miss| label
   3469 // otherwise.
   3470 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
   3471                                                       Label* miss,
   3472                                                       Label* done,
   3473                                                       Register elements,
   3474                                                       Register name,
   3475                                                       Register r0,
   3476                                                       Register r1) {
   3477   DCHECK(!elements.is(r0));
   3478   DCHECK(!elements.is(r1));
   3479   DCHECK(!name.is(r0));
   3480   DCHECK(!name.is(r1));
   3481 
   3482   __ AssertName(name);
   3483 
   3484   __ mov(r1, FieldOperand(elements, kCapacityOffset));
   3485   __ shr(r1, kSmiTagSize);  // convert smi to int
   3486   __ dec(r1);
   3487 
   3488   // Generate an unrolled loop that performs a few probes before
   3489   // giving up. Measurements done on Gmail indicate that 2 probes
   3490   // cover ~93% of loads from dictionaries.
   3491   for (int i = 0; i < kInlinedProbes; i++) {
   3492     // Compute the masked index: (hash + i + i * i) & mask.
   3493     __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
   3494     __ shr(r0, Name::kHashShift);
   3495     if (i > 0) {
   3496       __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
   3497     }
   3498     __ and_(r0, r1);
   3499 
   3500     // Scale the index by multiplying by the entry size.
   3501     STATIC_ASSERT(NameDictionary::kEntrySize == 3);
   3502     __ lea(r0, Operand(r0, r0, times_2, 0));  // r0 = r0 * 3
   3503 
   3504     // Check if the key is identical to the name.
   3505     __ cmp(name, Operand(elements,
   3506                          r0,
   3507                          times_4,
   3508                          kElementsStartOffset - kHeapObjectTag));
   3509     __ j(equal, done);
   3510   }
   3511 
   3512   NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0,
   3513                                 POSITIVE_LOOKUP);
   3514   __ push(name);
   3515   __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
   3516   __ shr(r0, Name::kHashShift);
   3517   __ push(r0);
   3518   __ CallStub(&stub);
   3519 
   3520   __ test(r1, r1);
   3521   __ j(zero, miss);
   3522   __ jmp(done);
   3523 }
   3524 
   3525 
   3526 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
   3527   // This stub overrides SometimesSetsUpAFrame() to return false.  That means
   3528   // we cannot call anything that could cause a GC from this stub.
   3529   // Stack frame on entry:
   3530   //  esp[0 * kPointerSize]: return address.
   3531   //  esp[1 * kPointerSize]: key's hash.
   3532   //  esp[2 * kPointerSize]: key.
   3533   // Registers:
   3534   //  dictionary_: NameDictionary to probe.
   3535   //  result_: used as scratch.
   3536   //  index_: will hold an index of entry if lookup is successful.
   3537   //          might alias with result_.
   3538   // Returns:
   3539   //  result_ is zero if lookup failed, non zero otherwise.
   3540 
   3541   Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
   3542 
   3543   Register scratch = result();
   3544 
   3545   __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset));
   3546   __ dec(scratch);
   3547   __ SmiUntag(scratch);
   3548   __ push(scratch);
   3549 
   3550   // If the names in the slots probed for the hash value (probes 1 to
   3551   // kProbes - 1) are not equal to the name, and the kProbes-th slot is not
   3552   // used (its name is the undefined value), the hash table is guaranteed
   3553   // not to contain the property. This holds even if some slots represent
   3554   // deleted properties (their names are the hole value).
   3555   for (int i = kInlinedProbes; i < kTotalProbes; i++) {
   3556     // Compute the masked index: (hash + i + i * i) & mask.
   3557     __ mov(scratch, Operand(esp, 2 * kPointerSize));
   3558     if (i > 0) {
   3559       __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
   3560     }
   3561     __ and_(scratch, Operand(esp, 0));
   3562 
   3563     // Scale the index by multiplying by the entry size.
   3564     STATIC_ASSERT(NameDictionary::kEntrySize == 3);
   3565     __ lea(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.
   3566 
   3567     // Having undefined at this place means the name is not contained.
   3568     STATIC_ASSERT(kSmiTagSize == 1);
   3569     __ mov(scratch, Operand(dictionary(), index(), times_pointer_size,
   3570                             kElementsStartOffset - kHeapObjectTag));
   3571     __ cmp(scratch, isolate()->factory()->undefined_value());
   3572     __ j(equal, &not_in_dictionary);
   3573 
   3574     // Stop if found the property.
   3575     __ cmp(scratch, Operand(esp, 3 * kPointerSize));
   3576     __ j(equal, &in_dictionary);
   3577 
   3578     if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
   3579       // If we hit a key that is not a unique name during negative
   3580       // lookup we have to bailout as this key might be equal to the
   3581       // key we are looking for.
   3582 
   3583       // Check if the entry name is not a unique name.
   3584       __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
   3585       __ JumpIfNotUniqueNameInstanceType(
   3586           FieldOperand(scratch, Map::kInstanceTypeOffset),
   3587           &maybe_in_dictionary);
   3588     }
   3589   }
   3590 
   3591   __ bind(&maybe_in_dictionary);
   3592   // If we are doing a negative lookup, probing failure should be treated
   3593   // as a lookup success. For a positive lookup, probing failure should be
   3594   // treated as a lookup failure.
   3595   if (mode() == POSITIVE_LOOKUP) {
   3596     __ mov(result(), Immediate(0));
   3597     __ Drop(1);
   3598     __ ret(2 * kPointerSize);
   3599   }
   3600 
   3601   __ bind(&in_dictionary);
   3602   __ mov(result(), Immediate(1));
   3603   __ Drop(1);
   3604   __ ret(2 * kPointerSize);
   3605 
   3606   __ bind(&not_in_dictionary);
   3607   __ mov(result(), Immediate(0));
   3608   __ Drop(1);
   3609   __ ret(2 * kPointerSize);
   3610 }
   3611 
   3612 
   3613 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
   3614     Isolate* isolate) {
   3615   StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
   3616   stub.GetCode();
   3617   StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
   3618   stub2.GetCode();
   3619 }
   3620 
   3621 
   3622 // Takes the input in 3 registers: address_ value_ and object_.  A pointer to
   3623 // the value has just been written into the object, now this stub makes sure
   3624 // we keep the GC informed.  The word in the object where the value has been
   3625 // written is in the address register.
   3626 void RecordWriteStub::Generate(MacroAssembler* masm) {
   3627   Label skip_to_incremental_noncompacting;
   3628   Label skip_to_incremental_compacting;
   3629 
   3630   // The first two instructions are generated with labels so as to get the
   3631   // offset fixed up correctly by the bind(Label*) call.  We patch them back
   3632   // and forth between compare instructions (nops in this position) and the
   3633   // real branches when we start and stop incremental heap marking.
   3634   __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
   3635   __ jmp(&skip_to_incremental_compacting, Label::kFar);
   3636 
   3637   if (remembered_set_action() == EMIT_REMEMBERED_SET) {
   3638     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
   3639                            MacroAssembler::kReturnAtEnd);
   3640   } else {
   3641     __ ret(0);
   3642   }
   3643 
   3644   __ bind(&skip_to_incremental_noncompacting);
   3645   GenerateIncremental(masm, INCREMENTAL);
   3646 
   3647   __ bind(&skip_to_incremental_compacting);
   3648   GenerateIncremental(masm, INCREMENTAL_COMPACTION);
   3649 
   3650   // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
   3651   // Will be checked in IncrementalMarking::ActivateGeneratedStub.
   3652   masm->set_byte_at(0, kTwoByteNopInstruction);
   3653   masm->set_byte_at(2, kFiveByteNopInstruction);
   3654 }
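
        // Informal sketch of the patching protocol: the stub begins with a
        // two-byte and a five-byte instruction that incremental marking toggles
        // between nops and the two jumps emitted above:
        //
        //   STORE_BUFFER_ONLY:       [2-byte nop][5-byte nop]  fall through
        //   INCREMENTAL:             [2-byte jmp][5-byte nop]  -> noncompacting
        //   INCREMENTAL_COMPACTION:  [2-byte nop][5-byte jmp]  -> compacting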
   3655 
   3656 
   3657 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
   3658   regs_.Save(masm);
   3659 
   3660   if (remembered_set_action() == EMIT_REMEMBERED_SET) {
   3661     Label dont_need_remembered_set;
   3662 
   3663     __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
   3664     __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
   3665                            regs_.scratch0(),
   3666                            &dont_need_remembered_set);
   3667 
   3668     __ CheckPageFlag(regs_.object(),
   3669                      regs_.scratch0(),
   3670                      1 << MemoryChunk::SCAN_ON_SCAVENGE,
   3671                      not_zero,
   3672                      &dont_need_remembered_set);
   3673 
   3674     // First notify the incremental marker if necessary, then update the
   3675     // remembered set.
   3676     CheckNeedsToInformIncrementalMarker(
   3677         masm,
   3678         kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
   3679         mode);
   3680     InformIncrementalMarker(masm);
   3681     regs_.Restore(masm);
   3682     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
   3683                            MacroAssembler::kReturnAtEnd);
   3684 
   3685     __ bind(&dont_need_remembered_set);
   3686   }
   3687 
   3688   CheckNeedsToInformIncrementalMarker(
   3689       masm,
   3690       kReturnOnNoNeedToInformIncrementalMarker,
   3691       mode);
   3692   InformIncrementalMarker(masm);
   3693   regs_.Restore(masm);
   3694   __ ret(0);
   3695 }
   3696 
   3697 
   3698 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
   3699   regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
   3700   int argument_count = 3;
   3701   __ PrepareCallCFunction(argument_count, regs_.scratch0());
   3702   __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
   3703   __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
   3704   __ mov(Operand(esp, 2 * kPointerSize),
   3705          Immediate(ExternalReference::isolate_address(isolate())));
   3706 
   3707   AllowExternalCallThatCantCauseGC scope(masm);
   3708   __ CallCFunction(
   3709       ExternalReference::incremental_marking_record_write_function(isolate()),
   3710       argument_count);
   3711 
   3712   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
   3713 }
   3714 
   3715 
   3716 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
   3717     MacroAssembler* masm,
   3718     OnNoNeedToInformIncrementalMarker on_no_need,
   3719     Mode mode) {
   3720   Label object_is_black, need_incremental, need_incremental_pop_object;
   3721 
   3722   __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
   3723   __ and_(regs_.scratch0(), regs_.object());
   3724   __ mov(regs_.scratch1(),
   3725          Operand(regs_.scratch0(),
   3726                  MemoryChunk::kWriteBarrierCounterOffset));
   3727   __ sub(regs_.scratch1(), Immediate(1));
   3728   __ mov(Operand(regs_.scratch0(),
   3729                  MemoryChunk::kWriteBarrierCounterOffset),
   3730          regs_.scratch1());
   3731   __ j(negative, &need_incremental);
   3732 
   3733   // Let's look at the color of the object:  If it is not black we don't have
   3734   // to inform the incremental marker.
   3735   __ JumpIfBlack(regs_.object(),
   3736                  regs_.scratch0(),
   3737                  regs_.scratch1(),
   3738                  &object_is_black,
   3739                  Label::kNear);
   3740 
   3741   regs_.Restore(masm);
   3742   if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
   3743     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
   3744                            MacroAssembler::kReturnAtEnd);
   3745   } else {
   3746     __ ret(0);
   3747   }
   3748 
   3749   __ bind(&object_is_black);
   3750 
   3751   // Get the value from the slot.
   3752   __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
   3753 
   3754   if (mode == INCREMENTAL_COMPACTION) {
   3755     Label ensure_not_white;
   3756 
   3757     __ CheckPageFlag(regs_.scratch0(),  // Contains value.
   3758                      regs_.scratch1(),  // Scratch.
   3759                      MemoryChunk::kEvacuationCandidateMask,
   3760                      zero,
   3761                      &ensure_not_white,
   3762                      Label::kNear);
   3763 
   3764     __ CheckPageFlag(regs_.object(),
   3765                      regs_.scratch1(),  // Scratch.
   3766                      MemoryChunk::kSkipEvacuationSlotsRecordingMask,
   3767                      not_zero,
   3768                      &ensure_not_white,
   3769                      Label::kNear);
   3770 
   3771     __ jmp(&need_incremental);
   3772 
   3773     __ bind(&ensure_not_white);
   3774   }
   3775 
   3776   // We need an extra register for this, so we push the object register
   3777   // temporarily.
   3778   __ push(regs_.object());
   3779   __ JumpIfWhite(regs_.scratch0(),  // The value.
   3780                  regs_.scratch1(),  // Scratch.
   3781                  regs_.object(),    // Scratch.
   3782                  &need_incremental_pop_object, Label::kNear);
   3783   __ pop(regs_.object());
   3784 
   3785   regs_.Restore(masm);
   3786   if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
   3787     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
   3788                            MacroAssembler::kReturnAtEnd);
   3789   } else {
   3790     __ ret(0);
   3791   }
   3792 
   3793   __ bind(&need_incremental_pop_object);
   3794   __ pop(regs_.object());
   3795 
   3796   __ bind(&need_incremental);
   3797 
   3798   // Fall through when we need to inform the incremental marker.
   3799 }
   3800 
   3801 
   3802 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
   3803   CEntryStub ces(isolate(), 1, kSaveFPRegs);
   3804   __ call(ces.GetCode(), RelocInfo::CODE_TARGET);
   3805   int parameter_count_offset =
   3806       StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
   3807   __ mov(ebx, MemOperand(ebp, parameter_count_offset));
   3808   masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
   3809   __ pop(ecx);
   3810   int additional_offset =
   3811       function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
   3812   __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
   3813   __ jmp(ecx);  // Return to IC Miss stub, continuation still on stack.
   3814 }
   3815 
   3816 
   3817 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
   3818   __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   3819   LoadICStub stub(isolate(), state());
   3820   stub.GenerateForTrampoline(masm);
   3821 }
   3822 
   3823 
   3824 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
   3825   __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   3826   KeyedLoadICStub stub(isolate(), state());
   3827   stub.GenerateForTrampoline(masm);
   3828 }
   3829 
   3830 
   3831 static void HandleArrayCases(MacroAssembler* masm, Register receiver,
   3832                              Register key, Register vector, Register slot,
   3833                              Register feedback, bool is_polymorphic,
   3834                              Label* miss) {
   3835   // The feedback register initially contains the feedback array.
   3836   Label next, next_loop, prepare_next;
   3837   Label load_smi_map, compare_map;
   3838   Label start_polymorphic;
   3839 
   3840   __ push(receiver);
   3841   __ push(vector);
   3842 
   3843   Register receiver_map = receiver;
   3844   Register cached_map = vector;
   3845 
   3846   // Receiver might not be a heap object.
   3847   __ JumpIfSmi(receiver, &load_smi_map);
   3848   __ mov(receiver_map, FieldOperand(receiver, 0));
   3849   __ bind(&compare_map);
   3850   __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
   3851 
   3852   // A named keyed load might have a two-element array; all other cases can
   3853   // count on an array with at least two {map, handler} pairs, so they can go
   3854   // right into polymorphic array handling.
   3855   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   3856   __ j(not_equal, is_polymorphic ? &start_polymorphic : &next);
   3857 
   3858   // Found a matching map; now call the handler.
   3859   Register handler = feedback;
   3860   __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
   3861   __ pop(vector);
   3862   __ pop(receiver);
   3863   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
   3864   __ jmp(handler);
   3865 
   3866   if (!is_polymorphic) {
   3867     __ bind(&next);
   3868     __ cmp(FieldOperand(feedback, FixedArray::kLengthOffset),
   3869            Immediate(Smi::FromInt(2)));
   3870     __ j(not_equal, &start_polymorphic);
   3871     __ pop(vector);
   3872     __ pop(receiver);
   3873     __ jmp(miss);
   3874   }
   3875 
   3876   // Polymorphic, we have to loop from 2 to N
   3877   __ bind(&start_polymorphic);
   3878   __ push(key);
   3879   Register counter = key;
   3880   __ mov(counter, Immediate(Smi::FromInt(2)));
   3881   __ bind(&next_loop);
   3882   __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
   3883                                   FixedArray::kHeaderSize));
   3884   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   3885   __ j(not_equal, &prepare_next);
   3886   __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
   3887                                FixedArray::kHeaderSize + kPointerSize));
   3888   __ pop(key);
   3889   __ pop(vector);
   3890   __ pop(receiver);
   3891   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
   3892   __ jmp(handler);
   3893 
   3894   __ bind(&prepare_next);
   3895   __ add(counter, Immediate(Smi::FromInt(2)));
   3896   __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
   3897   __ j(less, &next_loop);
   3898 
   3899   // We exhausted our array of map handler pairs.
   3900   __ pop(key);
   3901   __ pop(vector);
   3902   __ pop(receiver);
   3903   __ jmp(miss);
   3904 
   3905   __ bind(&load_smi_map);
   3906   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
   3907   __ jmp(&compare_map);
   3908 }
   3909 
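         // For reference, the polymorphic feedback array consumed above is a
         // FixedArray of {WeakCell(map), handler} pairs:
         //
         //   element 0: WeakCell -> Map A    element 1: handler A
         //   element 2: WeakCell -> Map B    element 3: handler B
         //   ...
         //
         // The loop counter is kept as a Smi. On ia32 a Smi is the value
         // shifted left by one, so scaling it with times_half_pointer_size
         // yields exactly one kPointerSize step per array element.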
   3910 
   3911 static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
   3912                                   Register key, Register vector, Register slot,
   3913                                   Register weak_cell, Label* miss) {
   3914   // feedback initially contains the feedback array
   3915   Label compare_smi_map;
   3916 
    3917   // Move the weakly-held map into the weak_cell register.
   3918   Register ic_map = weak_cell;
   3919   __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));
   3920 
   3921   // Receiver might not be a heap object.
   3922   __ JumpIfSmi(receiver, &compare_smi_map);
   3923   __ cmp(ic_map, FieldOperand(receiver, 0));
   3924   __ j(not_equal, miss);
   3925   Register handler = weak_cell;
   3926   __ mov(handler, FieldOperand(vector, slot, times_half_pointer_size,
   3927                                FixedArray::kHeaderSize + kPointerSize));
   3928   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
   3929   __ jmp(handler);
   3930 
   3931   // In microbenchmarks, it made sense to unroll this code so that the call to
   3932   // the handler is duplicated for a HeapObject receiver and a Smi receiver.
   3933   __ bind(&compare_smi_map);
   3934   __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
   3935   __ j(not_equal, miss);
   3936   __ mov(handler, FieldOperand(vector, slot, times_half_pointer_size,
   3937                                FixedArray::kHeaderSize + kPointerSize));
   3938   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
   3939   __ jmp(handler);
   3940 }
   3941 
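         // For reference, the monomorphic case above expects the feedback
         // slot to hold a WeakCell pointing at the receiver map, with the
         // handler code object in the following slot (hence the
         // FixedArray::kHeaderSize + kPointerSize offset). Smi receivers are
         // keyed under the heap number map, which is why the smi path loads
         // Heap::kHeapNumberMapRootIndex before comparing.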
   3942 
   3943 void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
   3944 
   3945 
   3946 void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
   3947   GenerateImpl(masm, true);
   3948 }
   3949 
   3950 
   3951 void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
   3952   Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // edx
   3953   Register name = LoadWithVectorDescriptor::NameRegister();          // ecx
   3954   Register vector = LoadWithVectorDescriptor::VectorRegister();      // ebx
   3955   Register slot = LoadWithVectorDescriptor::SlotRegister();          // eax
   3956   Register scratch = edi;
   3957   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
   3958                                FixedArray::kHeaderSize));
   3959 
   3960   // Is it a weak cell?
   3961   Label try_array;
   3962   Label not_array, smi_key, key_okay, miss;
   3963   __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
   3964   __ j(not_equal, &try_array);
   3965   HandleMonomorphicCase(masm, receiver, name, vector, slot, scratch, &miss);
   3966 
   3967   // Is it a fixed array?
   3968   __ bind(&try_array);
   3969   __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
   3970   __ j(not_equal, &not_array);
   3971   HandleArrayCases(masm, receiver, name, vector, slot, scratch, true, &miss);
   3972 
   3973   __ bind(&not_array);
   3974   __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
   3975   __ j(not_equal, &miss);
   3976   __ push(slot);
   3977   __ push(vector);
   3978   Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
   3979       Code::ComputeHandlerFlags(Code::LOAD_IC));
   3980   masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags,
   3981                                                receiver, name, vector, scratch);
   3982   __ pop(vector);
   3983   __ pop(slot);
   3984 
   3985   __ bind(&miss);
   3986   LoadIC::GenerateMiss(masm);
   3987 }
   3988 
   3989 
   3990 void KeyedLoadICStub::Generate(MacroAssembler* masm) {
   3991   GenerateImpl(masm, false);
   3992 }
   3993 
   3994 
   3995 void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
   3996   GenerateImpl(masm, true);
   3997 }
   3998 
   3999 
   4000 void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
   4001   Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // edx
   4002   Register key = LoadWithVectorDescriptor::NameRegister();           // ecx
   4003   Register vector = LoadWithVectorDescriptor::VectorRegister();      // ebx
   4004   Register slot = LoadWithVectorDescriptor::SlotRegister();          // eax
   4005   Register feedback = edi;
   4006   __ mov(feedback, FieldOperand(vector, slot, times_half_pointer_size,
   4007                                 FixedArray::kHeaderSize));
   4008   // Is it a weak cell?
   4009   Label try_array;
   4010   Label not_array, smi_key, key_okay, miss;
   4011   __ CompareRoot(FieldOperand(feedback, 0), Heap::kWeakCellMapRootIndex);
   4012   __ j(not_equal, &try_array);
   4013   HandleMonomorphicCase(masm, receiver, key, vector, slot, feedback, &miss);
   4014 
   4015   __ bind(&try_array);
   4016   // Is it a fixed array?
   4017   __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
   4018   __ j(not_equal, &not_array);
   4019 
   4020   // We have a polymorphic element handler.
   4021   Label polymorphic, try_poly_name;
   4022   __ bind(&polymorphic);
   4023   HandleArrayCases(masm, receiver, key, vector, slot, feedback, true, &miss);
   4024 
   4025   __ bind(&not_array);
   4026   // Is it generic?
   4027   __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
   4028   __ j(not_equal, &try_poly_name);
   4029   Handle<Code> megamorphic_stub =
   4030       KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
   4031   __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
   4032 
   4033   __ bind(&try_poly_name);
   4034   // We might have a name in feedback, and a fixed array in the next slot.
   4035   __ cmp(key, feedback);
   4036   __ j(not_equal, &miss);
   4037   // If the name comparison succeeded, we know we have a fixed array with
   4038   // at least one map/handler pair.
   4039   __ mov(feedback, FieldOperand(vector, slot, times_half_pointer_size,
   4040                                 FixedArray::kHeaderSize + kPointerSize));
   4041   HandleArrayCases(masm, receiver, key, vector, slot, feedback, false, &miss);
   4042 
   4043   __ bind(&miss);
   4044   KeyedLoadIC::GenerateMiss(masm);
   4045 }
   4046 
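         // For reference, keyed load feedback can additionally record a
         // property name: when the feedback slot holds a Name equal to the
         // key, the next slot holds a FixedArray with at least one
         // {map, handler} pair, which is why HandleArrayCases is re-entered
         // above with is_polymorphic == false.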
   4047 
   4048 void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
   4049   __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   4050   VectorStoreICStub stub(isolate(), state());
   4051   stub.GenerateForTrampoline(masm);
   4052 }
   4053 
   4054 
   4055 void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
   4056   __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   4057   VectorKeyedStoreICStub stub(isolate(), state());
   4058   stub.GenerateForTrampoline(masm);
   4059 }
   4060 
   4061 
   4062 void VectorStoreICStub::Generate(MacroAssembler* masm) {
   4063   GenerateImpl(masm, false);
   4064 }
   4065 
   4066 
   4067 void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
   4068   GenerateImpl(masm, true);
   4069 }
   4070 
   4071 
   4072 // value is on the stack already.
   4073 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register receiver,
   4074                                        Register key, Register vector,
   4075                                        Register slot, Register feedback,
   4076                                        bool is_polymorphic, Label* miss) {
   4077   // feedback initially contains the feedback array
   4078   Label next, next_loop, prepare_next;
   4079   Label load_smi_map, compare_map;
   4080   Label start_polymorphic;
   4081   Label pop_and_miss;
   4082   ExternalReference virtual_register =
   4083       ExternalReference::virtual_handler_register(masm->isolate());
   4084 
   4085   __ push(receiver);
   4086   __ push(vector);
   4087 
   4088   Register receiver_map = receiver;
   4089   Register cached_map = vector;
   4090 
   4091   // Receiver might not be a heap object.
   4092   __ JumpIfSmi(receiver, &load_smi_map);
   4093   __ mov(receiver_map, FieldOperand(receiver, 0));
   4094   __ bind(&compare_map);
   4095   __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
   4096 
    4097   // A named keyed store might have a 2-element array; all other cases can count
   4098   // on an array with at least 2 {map, handler} pairs, so they can go right
   4099   // into polymorphic array handling.
   4100   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   4101   __ j(not_equal, &start_polymorphic);
   4102 
    4103   // Found, now call the handler.
   4104   Register handler = feedback;
   4105   DCHECK(handler.is(VectorStoreICDescriptor::ValueRegister()));
   4106   __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
   4107   __ pop(vector);
   4108   __ pop(receiver);
   4109   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
   4110   __ mov(Operand::StaticVariable(virtual_register), handler);
   4111   __ pop(handler);  // Pop "value".
   4112   __ jmp(Operand::StaticVariable(virtual_register));
   4113 
   4114   // Polymorphic, we have to loop from 2 to N
   4115   __ bind(&start_polymorphic);
   4116   __ push(key);
   4117   Register counter = key;
   4118   __ mov(counter, Immediate(Smi::FromInt(2)));
   4119 
   4120   if (!is_polymorphic) {
   4121     // If is_polymorphic is false, we may only have a two element array.
   4122     // Check against length now in that case.
   4123     __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
   4124     __ j(greater_equal, &pop_and_miss);
   4125   }
   4126 
   4127   __ bind(&next_loop);
   4128   __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
   4129                                   FixedArray::kHeaderSize));
   4130   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   4131   __ j(not_equal, &prepare_next);
   4132   __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
   4133                                FixedArray::kHeaderSize + kPointerSize));
   4134   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
   4135   __ pop(key);
   4136   __ pop(vector);
   4137   __ pop(receiver);
   4138   __ mov(Operand::StaticVariable(virtual_register), handler);
   4139   __ pop(handler);  // Pop "value".
   4140   __ jmp(Operand::StaticVariable(virtual_register));
   4141 
   4142   __ bind(&prepare_next);
   4143   __ add(counter, Immediate(Smi::FromInt(2)));
   4144   __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
   4145   __ j(less, &next_loop);
   4146 
   4147   // We exhausted our array of map handler pairs.
   4148   __ bind(&pop_and_miss);
   4149   __ pop(key);
   4150   __ pop(vector);
   4151   __ pop(receiver);
   4152   __ jmp(miss);
   4153 
   4154   __ bind(&load_smi_map);
   4155   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
   4156   __ jmp(&compare_map);
   4157 }
   4158 
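         // For reference, the "virtual register" used above is a per-isolate
         // memory cell (ExternalReference::virtual_handler_register). It is
         // needed because the handler register aliases the value register
         // (see the DCHECK above): the handler address is stashed in memory,
         // the value is popped back into its register, and the jump goes
         // through the memory operand:
         //
         //   __ mov(Operand::StaticVariable(virtual_register), handler);
         //   __ pop(handler);  // handler aliases the value register
         //   __ jmp(Operand::StaticVariable(virtual_register));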
   4159 
   4160 static void HandleMonomorphicStoreCase(MacroAssembler* masm, Register receiver,
   4161                                        Register key, Register vector,
   4162                                        Register slot, Register weak_cell,
   4163                                        Label* miss) {
   4164   // The store ic value is on the stack.
   4165   DCHECK(weak_cell.is(VectorStoreICDescriptor::ValueRegister()));
   4166   ExternalReference virtual_register =
   4167       ExternalReference::virtual_handler_register(masm->isolate());
   4168 
   4169   // feedback initially contains the feedback array
   4170   Label compare_smi_map;
   4171 
    4172   // Move the weakly-held map into the weak_cell register.
   4173   Register ic_map = weak_cell;
   4174   __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));
   4175 
   4176   // Receiver might not be a heap object.
   4177   __ JumpIfSmi(receiver, &compare_smi_map);
   4178   __ cmp(ic_map, FieldOperand(receiver, 0));
   4179   __ j(not_equal, miss);
   4180   __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
   4181                                  FixedArray::kHeaderSize + kPointerSize));
   4182   __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
    4183   // Put the store ic value back in its register.
   4184   __ mov(Operand::StaticVariable(virtual_register), weak_cell);
   4185   __ pop(weak_cell);  // Pop "value".
    4186   // Jump to the handler.
   4187   __ jmp(Operand::StaticVariable(virtual_register));
   4188 
   4189   // In microbenchmarks, it made sense to unroll this code so that the call to
   4190   // the handler is duplicated for a HeapObject receiver and a Smi receiver.
   4191   __ bind(&compare_smi_map);
   4192   __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
   4193   __ j(not_equal, miss);
   4194   __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
   4195                                  FixedArray::kHeaderSize + kPointerSize));
   4196   __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
   4197   __ mov(Operand::StaticVariable(virtual_register), weak_cell);
   4198   __ pop(weak_cell);  // Pop "value".
    4199   // Jump to the handler.
   4200   __ jmp(Operand::StaticVariable(virtual_register));
   4201 }
   4202 
   4203 
   4204 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
   4205   Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // edx
   4206   Register key = VectorStoreICDescriptor::NameRegister();           // ecx
   4207   Register value = VectorStoreICDescriptor::ValueRegister();        // eax
   4208   Register vector = VectorStoreICDescriptor::VectorRegister();      // ebx
   4209   Register slot = VectorStoreICDescriptor::SlotRegister();          // edi
   4210   Label miss;
   4211 
   4212   __ push(value);
   4213 
   4214   Register scratch = value;
   4215   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
   4216                                FixedArray::kHeaderSize));
   4217 
   4218   // Is it a weak cell?
   4219   Label try_array;
   4220   Label not_array, smi_key, key_okay;
   4221   __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
   4222   __ j(not_equal, &try_array);
   4223   HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
   4224 
   4225   // Is it a fixed array?
   4226   __ bind(&try_array);
   4227   __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
   4228   __ j(not_equal, &not_array);
   4229   HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, true,
   4230                              &miss);
   4231 
   4232   __ bind(&not_array);
   4233   __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
   4234   __ j(not_equal, &miss);
   4235 
   4236   __ pop(value);
   4237   __ push(slot);
   4238   __ push(vector);
   4239   Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
   4240       Code::ComputeHandlerFlags(Code::STORE_IC));
   4241   masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags,
   4242                                                receiver, key, slot, no_reg);
   4243   __ pop(vector);
   4244   __ pop(slot);
   4245   Label no_pop_miss;
   4246   __ jmp(&no_pop_miss);
   4247 
   4248   __ bind(&miss);
   4249   __ pop(value);
   4250   __ bind(&no_pop_miss);
   4251   StoreIC::GenerateMiss(masm);
   4252 }
   4253 
   4254 
   4255 void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
   4256   GenerateImpl(masm, false);
   4257 }
   4258 
   4259 
   4260 void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
   4261   GenerateImpl(masm, true);
   4262 }
   4263 
   4264 
   4265 static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
   4266                                             Register receiver, Register key,
   4267                                             Register vector, Register slot,
   4268                                             Register feedback, Label* miss) {
   4269   // feedback initially contains the feedback array
   4270   Label next, next_loop, prepare_next;
   4271   Label load_smi_map, compare_map;
   4272   Label transition_call;
   4273   Label pop_and_miss;
   4274   ExternalReference virtual_register =
   4275       ExternalReference::virtual_handler_register(masm->isolate());
   4276   ExternalReference virtual_slot =
   4277       ExternalReference::virtual_slot_register(masm->isolate());
   4278 
   4279   __ push(receiver);
   4280   __ push(vector);
   4281 
   4282   Register receiver_map = receiver;
   4283   Register cached_map = vector;
   4284   Register value = StoreDescriptor::ValueRegister();
   4285 
   4286   // Receiver might not be a heap object.
   4287   __ JumpIfSmi(receiver, &load_smi_map);
   4288   __ mov(receiver_map, FieldOperand(receiver, 0));
   4289   __ bind(&compare_map);
   4290 
   4291   // Polymorphic, we have to loop from 0 to N - 1
   4292   __ push(key);
   4293   // Current stack layout:
   4294   // - esp[0]    -- key
   4295   // - esp[4]    -- vector
   4296   // - esp[8]    -- receiver
   4297   // - esp[12]   -- value
   4298   // - esp[16]   -- return address
   4299   //
   4300   // Required stack layout for handler call:
   4301   // - esp[0]    -- return address
   4302   // - receiver, key, value, vector, slot in registers.
   4303   // - handler in virtual register.
   4304   Register counter = key;
   4305   __ mov(counter, Immediate(Smi::FromInt(0)));
   4306   __ bind(&next_loop);
   4307   __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
   4308                                   FixedArray::kHeaderSize));
   4309   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   4310   __ j(not_equal, &prepare_next);
   4311   __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
   4312                                   FixedArray::kHeaderSize + kPointerSize));
   4313   __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
   4314   __ j(not_equal, &transition_call);
   4315   __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
   4316                                 FixedArray::kHeaderSize + 2 * kPointerSize));
   4317   __ pop(key);
   4318   __ pop(vector);
   4319   __ pop(receiver);
   4320   __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
   4321   __ mov(Operand::StaticVariable(virtual_register), feedback);
   4322   __ pop(value);
   4323   __ jmp(Operand::StaticVariable(virtual_register));
   4324 
   4325   __ bind(&transition_call);
   4326   // Current stack layout:
   4327   // - esp[0]    -- key
   4328   // - esp[4]    -- vector
   4329   // - esp[8]    -- receiver
   4330   // - esp[12]   -- value
   4331   // - esp[16]   -- return address
   4332   //
   4333   // Required stack layout for handler call:
   4334   // - esp[0]    -- return address
   4335   // - receiver, key, value, map, vector in registers.
   4336   // - handler and slot in virtual registers.
   4337   __ mov(Operand::StaticVariable(virtual_slot), slot);
   4338   __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
   4339                                 FixedArray::kHeaderSize + 2 * kPointerSize));
   4340   __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
   4341   __ mov(Operand::StaticVariable(virtual_register), feedback);
   4342 
   4343   __ mov(cached_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   4344   // The weak cell may have been cleared.
   4345   __ JumpIfSmi(cached_map, &pop_and_miss);
   4346   DCHECK(!cached_map.is(VectorStoreTransitionDescriptor::MapRegister()));
   4347   __ mov(VectorStoreTransitionDescriptor::MapRegister(), cached_map);
   4348 
   4349   // Pop key into place.
   4350   __ pop(key);
   4351   __ pop(vector);
   4352   __ pop(receiver);
   4353   __ pop(value);
   4354   __ jmp(Operand::StaticVariable(virtual_register));
   4355 
   4356   __ bind(&prepare_next);
   4357   __ add(counter, Immediate(Smi::FromInt(3)));
   4358   __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
   4359   __ j(less, &next_loop);
   4360 
   4361   // We exhausted our array of map handler pairs.
   4362   __ bind(&pop_and_miss);
   4363   __ pop(key);
   4364   __ pop(vector);
   4365   __ pop(receiver);
   4366   __ jmp(miss);
   4367 
   4368   __ bind(&load_smi_map);
   4369   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
   4370   __ jmp(&compare_map);
   4371 }
   4372 
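         // For reference, keyed store feedback arrays hold triples rather
         // than pairs, which is why the loop above starts at 0 and steps the
         // Smi counter by 3:
         //
         //   element 3n:     WeakCell -> receiver map
         //   element 3n + 1: WeakCell -> transition map, or undefined
         //   element 3n + 2: handler
         //
         // On the transition path the map is moved into
         // VectorStoreTransitionDescriptor::MapRegister() and the slot is
         // spilled to a second virtual register, apparently because no spare
         // GPR is left for it.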
   4373 
   4374 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
   4375   Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // edx
   4376   Register key = VectorStoreICDescriptor::NameRegister();           // ecx
   4377   Register value = VectorStoreICDescriptor::ValueRegister();        // eax
   4378   Register vector = VectorStoreICDescriptor::VectorRegister();      // ebx
   4379   Register slot = VectorStoreICDescriptor::SlotRegister();          // edi
   4380   Label miss;
   4381 
   4382   __ push(value);
   4383 
   4384   Register scratch = value;
   4385   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
   4386                                FixedArray::kHeaderSize));
   4387 
   4388   // Is it a weak cell?
   4389   Label try_array;
   4390   Label not_array, smi_key, key_okay;
   4391   __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
   4392   __ j(not_equal, &try_array);
   4393   HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
   4394 
   4395   // Is it a fixed array?
   4396   __ bind(&try_array);
   4397   __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
   4398   __ j(not_equal, &not_array);
   4399   HandlePolymorphicKeyedStoreCase(masm, receiver, key, vector, slot, scratch,
   4400                                   &miss);
   4401 
   4402   __ bind(&not_array);
   4403   Label try_poly_name;
   4404   __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
   4405   __ j(not_equal, &try_poly_name);
   4406 
   4407   __ pop(value);
   4408 
   4409   Handle<Code> megamorphic_stub =
   4410       KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
   4411   __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
   4412 
   4413   __ bind(&try_poly_name);
   4414   // We might have a name in feedback, and a fixed array in the next slot.
   4415   __ cmp(key, scratch);
   4416   __ j(not_equal, &miss);
   4417   // If the name comparison succeeded, we know we have a fixed array with
   4418   // at least one map/handler pair.
   4419   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
   4420                                FixedArray::kHeaderSize + kPointerSize));
   4421   HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, false,
   4422                              &miss);
   4423 
   4424   __ bind(&miss);
   4425   __ pop(value);
   4426   KeyedStoreIC::GenerateMiss(masm);
   4427 }
   4428 
   4429 
   4430 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
   4431   __ EmitLoadTypeFeedbackVector(ebx);
   4432   CallICStub stub(isolate(), state());
   4433   __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
   4434 }
   4435 
   4436 
   4437 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   4438   if (masm->isolate()->function_entry_hook() != NULL) {
   4439     ProfileEntryHookStub stub(masm->isolate());
   4440     masm->CallStub(&stub);
   4441   }
   4442 }
   4443 
   4444 
   4445 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
   4446   // Save volatile registers.
   4447   const int kNumSavedRegisters = 3;
   4448   __ push(eax);
   4449   __ push(ecx);
   4450   __ push(edx);
   4451 
   4452   // Calculate and push the original stack pointer.
   4453   __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
   4454   __ push(eax);
   4455 
   4456   // Retrieve our return address and use it to calculate the calling
   4457   // function's address.
   4458   __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
   4459   __ sub(eax, Immediate(Assembler::kCallInstructionLength));
   4460   __ push(eax);
   4461 
   4462   // Call the entry hook.
   4463   DCHECK(isolate()->function_entry_hook() != NULL);
   4464   __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
   4465           RelocInfo::RUNTIME_ENTRY);
   4466   __ add(esp, Immediate(2 * kPointerSize));
   4467 
    4468   // Restore the saved volatile registers.
   4469   __ pop(edx);
   4470   __ pop(ecx);
   4471   __ pop(eax);
   4472 
   4473   __ ret(0);
   4474 }
   4475 
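         // A minimal sketch of a matching hook, assuming the public
         // v8::FunctionEntryHook typedef (two uintptr_t parameters). Per the
         // comments above, the stub passes the calling function's address
         // (return address minus Assembler::kCallInstructionLength) and the
         // original stack pointer:
         //
         //   void MyEntryHook(uintptr_t function, uintptr_t stack_pointer) {
         //     // |function| approximates the entry of the profiled code;
         //     // |stack_pointer| is esp as it was before this stub ran.
         //   }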
   4476 
   4477 template<class T>
   4478 static void CreateArrayDispatch(MacroAssembler* masm,
   4479                                 AllocationSiteOverrideMode mode) {
   4480   if (mode == DISABLE_ALLOCATION_SITES) {
   4481     T stub(masm->isolate(),
   4482            GetInitialFastElementsKind(),
   4483            mode);
   4484     __ TailCallStub(&stub);
   4485   } else if (mode == DONT_OVERRIDE) {
   4486     int last_index = GetSequenceIndexFromFastElementsKind(
   4487         TERMINAL_FAST_ELEMENTS_KIND);
   4488     for (int i = 0; i <= last_index; ++i) {
   4489       Label next;
   4490       ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
   4491       __ cmp(edx, kind);
   4492       __ j(not_equal, &next);
   4493       T stub(masm->isolate(), kind);
   4494       __ TailCallStub(&stub);
   4495       __ bind(&next);
   4496     }
   4497 
   4498     // If we reached this point there is a problem.
   4499     __ Abort(kUnexpectedElementsKindInArrayConstructor);
   4500   } else {
   4501     UNREACHABLE();
   4502   }
   4503 }
   4504 
   4505 
   4506 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
   4507                                            AllocationSiteOverrideMode mode) {
   4508   // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
   4509   // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
   4510   // eax - number of arguments
   4511   // edi - constructor?
   4512   // esp[0] - return address
   4513   // esp[4] - last argument
   4514   Label normal_sequence;
   4515   if (mode == DONT_OVERRIDE) {
   4516     STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
   4517     STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
   4518     STATIC_ASSERT(FAST_ELEMENTS == 2);
   4519     STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
   4520     STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
   4521     STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
   4522 
    4523     // Is the low bit set? If so, we are holey and that is good.
   4524     __ test_b(edx, 1);
   4525     __ j(not_zero, &normal_sequence);
   4526   }
   4527 
    4528   // Look at the first argument.
   4529   __ mov(ecx, Operand(esp, kPointerSize));
   4530   __ test(ecx, ecx);
   4531   __ j(zero, &normal_sequence);
   4532 
   4533   if (mode == DISABLE_ALLOCATION_SITES) {
   4534     ElementsKind initial = GetInitialFastElementsKind();
   4535     ElementsKind holey_initial = GetHoleyElementsKind(initial);
   4536 
   4537     ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
   4538                                                   holey_initial,
   4539                                                   DISABLE_ALLOCATION_SITES);
   4540     __ TailCallStub(&stub_holey);
   4541 
   4542     __ bind(&normal_sequence);
   4543     ArraySingleArgumentConstructorStub stub(masm->isolate(),
   4544                                             initial,
   4545                                             DISABLE_ALLOCATION_SITES);
   4546     __ TailCallStub(&stub);
   4547   } else if (mode == DONT_OVERRIDE) {
   4548     // We are going to create a holey array, but our kind is non-holey.
   4549     // Fix kind and retry.
   4550     __ inc(edx);
   4551 
   4552     if (FLAG_debug_code) {
   4553       Handle<Map> allocation_site_map =
   4554           masm->isolate()->factory()->allocation_site_map();
   4555       __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
   4556       __ Assert(equal, kExpectedAllocationSite);
   4557     }
   4558 
    4559     // Save the resulting elements kind in type info. We can't just store edx
   4560     // in the AllocationSite::transition_info field because elements kind is
   4561     // restricted to a portion of the field...upper bits need to be left alone.
   4562     STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
   4563     __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset),
   4564            Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
   4565 
   4566     __ bind(&normal_sequence);
   4567     int last_index = GetSequenceIndexFromFastElementsKind(
   4568         TERMINAL_FAST_ELEMENTS_KIND);
   4569     for (int i = 0; i <= last_index; ++i) {
   4570       Label next;
   4571       ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
   4572       __ cmp(edx, kind);
   4573       __ j(not_equal, &next);
   4574       ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
   4575       __ TailCallStub(&stub);
   4576       __ bind(&next);
   4577     }
   4578 
   4579     // If we reached this point there is a problem.
   4580     __ Abort(kUnexpectedElementsKindInArrayConstructor);
   4581   } else {
   4582     UNREACHABLE();
   4583   }
   4584 }
   4585 
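         // For reference, the packed-to-holey fixup above relies on the
         // STATIC_ASSERTed kind numbering: each holey kind is its packed
         // counterpart plus one, so "__ inc(edx)" switches the kind, and
         // adding Smi::FromInt(kFastElementsKindPackedToHoley) to the
         // transition info bumps the ElementsKindBits field (at shift 0)
         // without disturbing the upper bits.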
   4586 
   4587 template<class T>
   4588 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
   4589   int to_index = GetSequenceIndexFromFastElementsKind(
   4590       TERMINAL_FAST_ELEMENTS_KIND);
   4591   for (int i = 0; i <= to_index; ++i) {
   4592     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
   4593     T stub(isolate, kind);
   4594     stub.GetCode();
   4595     if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
   4596       T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
   4597       stub1.GetCode();
   4598     }
   4599   }
   4600 }
   4601 
   4602 
   4603 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
   4604   ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
   4605       isolate);
   4606   ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
   4607       isolate);
   4608   ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
   4609       isolate);
   4610 }
   4611 
   4612 
   4613 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
   4614     Isolate* isolate) {
   4615   ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
   4616   for (int i = 0; i < 2; i++) {
    4617     // For internal arrays we only need a few things.
   4618     InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
   4619     stubh1.GetCode();
   4620     InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
   4621     stubh2.GetCode();
   4622     InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
   4623     stubh3.GetCode();
   4624   }
   4625 }
   4626 
   4627 
   4628 void ArrayConstructorStub::GenerateDispatchToArrayStub(
   4629     MacroAssembler* masm,
   4630     AllocationSiteOverrideMode mode) {
   4631   if (argument_count() == ANY) {
   4632     Label not_zero_case, not_one_case;
   4633     __ test(eax, eax);
   4634     __ j(not_zero, &not_zero_case);
   4635     CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
   4636 
   4637     __ bind(&not_zero_case);
   4638     __ cmp(eax, 1);
   4639     __ j(greater, &not_one_case);
   4640     CreateArrayDispatchOneArgument(masm, mode);
   4641 
   4642     __ bind(&not_one_case);
   4643     CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
   4644   } else if (argument_count() == NONE) {
   4645     CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
   4646   } else if (argument_count() == ONE) {
   4647     CreateArrayDispatchOneArgument(masm, mode);
   4648   } else if (argument_count() == MORE_THAN_ONE) {
   4649     CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
   4650   } else {
   4651     UNREACHABLE();
   4652   }
   4653 }
   4654 
   4655 
   4656 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
   4657   // ----------- S t a t e -------------
   4658   //  -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE)
   4659   //  -- ebx : AllocationSite or undefined
   4660   //  -- edi : constructor
   4661   //  -- edx : Original constructor
   4662   //  -- esp[0] : return address
   4663   //  -- esp[4] : last argument
   4664   // -----------------------------------
   4665   if (FLAG_debug_code) {
   4666     // The array construct code is only set for the global and natives
    4667     // builtin Array functions, which always have maps.
   4668 
   4669     // Initial map for the builtin Array function should be a map.
   4670     __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    4671     // This test will catch both a NULL pointer and a Smi.
   4672     __ test(ecx, Immediate(kSmiTagMask));
   4673     __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
   4674     __ CmpObjectType(ecx, MAP_TYPE, ecx);
   4675     __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
   4676 
   4677     // We should either have undefined in ebx or a valid AllocationSite
   4678     __ AssertUndefinedOrAllocationSite(ebx);
   4679   }
   4680 
   4681   Label subclassing;
   4682 
   4683   // Enter the context of the Array function.
   4684   __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
   4685 
   4686   __ cmp(edx, edi);
   4687   __ j(not_equal, &subclassing);
   4688 
   4689   Label no_info;
    4690   // If the feedback vector is the undefined value, call an array constructor
   4691   // that doesn't use AllocationSites.
   4692   __ cmp(ebx, isolate()->factory()->undefined_value());
   4693   __ j(equal, &no_info);
   4694 
   4695   // Only look at the lower 16 bits of the transition info.
   4696   __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
   4697   __ SmiUntag(edx);
   4698   STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
   4699   __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
   4700   GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
   4701 
   4702   __ bind(&no_info);
   4703   GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
   4704 
   4705   // Subclassing.
   4706   __ bind(&subclassing);
   4707   switch (argument_count()) {
   4708     case ANY:
   4709     case MORE_THAN_ONE:
   4710       __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
   4711       __ add(eax, Immediate(3));
   4712       break;
   4713     case NONE:
   4714       __ mov(Operand(esp, 1 * kPointerSize), edi);
   4715       __ mov(eax, Immediate(3));
   4716       break;
   4717     case ONE:
   4718       __ mov(Operand(esp, 2 * kPointerSize), edi);
   4719       __ mov(eax, Immediate(4));
   4720       break;
   4721   }
   4722   __ PopReturnAddressTo(ecx);
   4723   __ Push(edx);
   4724   __ Push(ebx);
   4725   __ PushReturnAddressFrom(ecx);
   4726   __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
   4727 }
   4728 
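         // For reference, the subclassing path above reshuffles the frame for
         // Runtime::kNewArray: the constructor (edi) overwrites the receiver
         // slot, new.target (edx) and the AllocationSite or undefined (ebx)
         // are pushed below the return address, and argc grows by 3 to cover
         // the extra values.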
   4729 
   4730 void InternalArrayConstructorStub::GenerateCase(
   4731     MacroAssembler* masm, ElementsKind kind) {
   4732   Label not_zero_case, not_one_case;
   4733   Label normal_sequence;
   4734 
   4735   __ test(eax, eax);
   4736   __ j(not_zero, &not_zero_case);
   4737   InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
   4738   __ TailCallStub(&stub0);
   4739 
   4740   __ bind(&not_zero_case);
   4741   __ cmp(eax, 1);
   4742   __ j(greater, &not_one_case);
   4743 
   4744   if (IsFastPackedElementsKind(kind)) {
    4745     // We might need to create a holey array;
    4746     // look at the first argument.
   4747     __ mov(ecx, Operand(esp, kPointerSize));
   4748     __ test(ecx, ecx);
   4749     __ j(zero, &normal_sequence);
   4750 
   4751     InternalArraySingleArgumentConstructorStub
   4752         stub1_holey(isolate(), GetHoleyElementsKind(kind));
   4753     __ TailCallStub(&stub1_holey);
   4754   }
   4755 
   4756   __ bind(&normal_sequence);
   4757   InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
   4758   __ TailCallStub(&stub1);
   4759 
   4760   __ bind(&not_one_case);
   4761   InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
   4762   __ TailCallStub(&stubN);
   4763 }
   4764 
   4765 
   4766 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
   4767   // ----------- S t a t e -------------
   4768   //  -- eax : argc
   4769   //  -- edi : constructor
   4770   //  -- esp[0] : return address
   4771   //  -- esp[4] : last argument
   4772   // -----------------------------------
   4773 
   4774   if (FLAG_debug_code) {
   4775     // The array construct code is only set for the global and natives
    4776     // builtin Array functions, which always have maps.
   4777 
   4778     // Initial map for the builtin Array function should be a map.
   4779     __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    4780     // This test will catch both a NULL pointer and a Smi.
   4781     __ test(ecx, Immediate(kSmiTagMask));
   4782     __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
   4783     __ CmpObjectType(ecx, MAP_TYPE, ecx);
   4784     __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
   4785   }
   4786 
   4787   // Figure out the right elements kind
   4788   __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
   4789 
    4790   // Load the map's "bit field 2" into ecx. We only need the first byte,
   4791   // but the following masking takes care of that anyway.
   4792   __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
   4793   // Retrieve elements_kind from bit field 2.
   4794   __ DecodeField<Map::ElementsKindBits>(ecx);
   4795 
   4796   if (FLAG_debug_code) {
   4797     Label done;
   4798     __ cmp(ecx, Immediate(FAST_ELEMENTS));
   4799     __ j(equal, &done);
   4800     __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
   4801     __ Assert(equal,
   4802               kInvalidElementsKindForInternalArrayOrInternalPackedArray);
   4803     __ bind(&done);
   4804   }
   4805 
   4806   Label fast_elements_case;
   4807   __ cmp(ecx, Immediate(FAST_ELEMENTS));
   4808   __ j(equal, &fast_elements_case);
   4809   GenerateCase(masm, FAST_HOLEY_ELEMENTS);
   4810 
   4811   __ bind(&fast_elements_case);
   4812   GenerateCase(masm, FAST_ELEMENTS);
   4813 }
   4814 
   4815 
   4816 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
   4817   Register context_reg = esi;
   4818   Register slot_reg = ebx;
   4819   Register result_reg = eax;
   4820   Label slow_case;
   4821 
   4822   // Go up context chain to the script context.
   4823   for (int i = 0; i < depth(); ++i) {
   4824     __ mov(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
   4825     context_reg = result_reg;
   4826   }
   4827 
   4828   // Load the PropertyCell value at the specified slot.
   4829   __ mov(result_reg, ContextOperand(context_reg, slot_reg));
   4830   __ mov(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));
   4831 
   4832   // Check that value is not the_hole.
   4833   __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
   4834   __ j(equal, &slow_case, Label::kNear);
   4835   __ Ret();
   4836 
   4837   // Fallback to the runtime.
   4838   __ bind(&slow_case);
   4839   __ SmiTag(slot_reg);
   4840   __ Pop(result_reg);  // Pop return address.
   4841   __ Push(slot_reg);
   4842   __ Push(result_reg);  // Push return address.
   4843   __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
   4844 }
   4845 
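         // For reference, depth() is the number of context links between the
         // current context and the script context owning the slot; the loop
         // above follows Context::PREVIOUS_INDEX that many times before
         // loading the PropertyCell and its value. A the_hole value marks a
         // cell this stub cannot handle (e.g. an uninitialized binding), so
         // it falls through to the runtime.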
   4846 
   4847 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
   4848   Register context_reg = esi;
   4849   Register slot_reg = ebx;
   4850   Register value_reg = eax;
   4851   Register cell_reg = edi;
   4852   Register cell_details_reg = edx;
   4853   Register cell_value_reg = ecx;
   4854   Label fast_heapobject_case, fast_smi_case, slow_case;
   4855 
   4856   if (FLAG_debug_code) {
   4857     __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
   4858     __ Check(not_equal, kUnexpectedValue);
   4859   }
   4860 
   4861   // Go up context chain to the script context.
   4862   for (int i = 0; i < depth(); ++i) {
   4863     __ mov(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
   4864     context_reg = cell_reg;
   4865   }
   4866 
   4867   // Load the PropertyCell at the specified slot.
   4868   __ mov(cell_reg, ContextOperand(context_reg, slot_reg));
   4869 
   4870   // Load PropertyDetails for the cell (actually only the cell_type and kind).
   4871   __ mov(cell_details_reg,
   4872          FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
   4873   __ SmiUntag(cell_details_reg);
   4874   __ and_(cell_details_reg,
   4875           Immediate(PropertyDetails::PropertyCellTypeField::kMask |
   4876                     PropertyDetails::KindField::kMask |
   4877                     PropertyDetails::kAttributesReadOnlyMask));
   4878 
   4879   // Check if PropertyCell holds mutable data.
   4880   Label not_mutable_data;
   4881   __ cmp(cell_details_reg,
   4882          Immediate(PropertyDetails::PropertyCellTypeField::encode(
   4883                        PropertyCellType::kMutable) |
   4884                    PropertyDetails::KindField::encode(kData)));
   4885   __ j(not_equal, &not_mutable_data);
   4886   __ JumpIfSmi(value_reg, &fast_smi_case);
   4887   __ bind(&fast_heapobject_case);
   4888   __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
   4889   __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
   4890                       cell_details_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
   4891                       OMIT_SMI_CHECK);
   4892   // RecordWriteField clobbers the value register, so we need to reload.
   4893   __ mov(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
   4894   __ Ret();
   4895   __ bind(&not_mutable_data);
   4896 
   4897   // Check if PropertyCell value matches the new value (relevant for Constant,
   4898   // ConstantType and Undefined cells).
   4899   Label not_same_value;
   4900   __ mov(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
   4901   __ cmp(cell_value_reg, value_reg);
   4902   __ j(not_equal, &not_same_value,
   4903        FLAG_debug_code ? Label::kFar : Label::kNear);
   4904   // Make sure the PropertyCell is not marked READ_ONLY.
   4905   __ test(cell_details_reg,
   4906           Immediate(PropertyDetails::kAttributesReadOnlyMask));
   4907   __ j(not_zero, &slow_case);
   4908   if (FLAG_debug_code) {
   4909     Label done;
   4910     // This can only be true for Constant, ConstantType and Undefined cells,
   4911     // because we never store the_hole via this stub.
   4912     __ cmp(cell_details_reg,
   4913            Immediate(PropertyDetails::PropertyCellTypeField::encode(
   4914                          PropertyCellType::kConstant) |
   4915                      PropertyDetails::KindField::encode(kData)));
   4916     __ j(equal, &done);
   4917     __ cmp(cell_details_reg,
   4918            Immediate(PropertyDetails::PropertyCellTypeField::encode(
   4919                          PropertyCellType::kConstantType) |
   4920                      PropertyDetails::KindField::encode(kData)));
   4921     __ j(equal, &done);
   4922     __ cmp(cell_details_reg,
   4923            Immediate(PropertyDetails::PropertyCellTypeField::encode(
   4924                          PropertyCellType::kUndefined) |
   4925                      PropertyDetails::KindField::encode(kData)));
   4926     __ Check(equal, kUnexpectedValue);
   4927     __ bind(&done);
   4928   }
   4929   __ Ret();
   4930   __ bind(&not_same_value);
   4931 
   4932   // Check if PropertyCell contains data with constant type (and is not
   4933   // READ_ONLY).
   4934   __ cmp(cell_details_reg,
   4935          Immediate(PropertyDetails::PropertyCellTypeField::encode(
   4936                        PropertyCellType::kConstantType) |
   4937                    PropertyDetails::KindField::encode(kData)));
   4938   __ j(not_equal, &slow_case, Label::kNear);
   4939 
   4940   // Now either both old and new values must be SMIs or both must be heap
    4941   // objects with the same map.
   4942   Label value_is_heap_object;
   4943   __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
   4944   __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
   4945   // Old and new values are SMIs, no need for a write barrier here.
   4946   __ bind(&fast_smi_case);
   4947   __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
   4948   __ Ret();
   4949   __ bind(&value_is_heap_object);
   4950   __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
   4951   Register cell_value_map_reg = cell_value_reg;
   4952   __ mov(cell_value_map_reg,
   4953          FieldOperand(cell_value_reg, HeapObject::kMapOffset));
   4954   __ cmp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
   4955   __ j(equal, &fast_heapobject_case);
   4956 
   4957   // Fallback to the runtime.
   4958   __ bind(&slow_case);
   4959   __ SmiTag(slot_reg);
   4960   __ Pop(cell_reg);  // Pop return address.
   4961   __ Push(slot_reg);
   4962   __ Push(value_reg);
   4963   __ Push(cell_reg);  // Push return address.
   4964   __ TailCallRuntime(is_strict(language_mode())
   4965                          ? Runtime::kStoreGlobalViaContext_Strict
   4966                          : Runtime::kStoreGlobalViaContext_Sloppy);
   4967 }
   4968 
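         // For reference, the fast paths above cover two PropertyCell shapes:
         // kMutable data cells (a plain store, with a write barrier when the
         // value is a heap object) and kConstantType cells whose old and new
         // values are both Smis or share a map. Constant, ConstantType and
         // Undefined cells that receive an identical, non-READ_ONLY value
         // also return early; everything else goes to the runtime.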
   4969 
   4970 // Generates an Operand for saving parameters after PrepareCallApiFunction.
   4971 static Operand ApiParameterOperand(int index) {
   4972   return Operand(esp, index * kPointerSize);
   4973 }
   4974 
   4975 
    4976 // Prepares the stack to put arguments (aligns and so on). Reserves space
    4977 // for the return value if needed (assumes the return value is a handle).
    4978 // Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
    4979 // etc. Saves the context (esi). If space was reserved for the return value,
    4980 // stores the pointer to the reserved slot into esi.
   4981 static void PrepareCallApiFunction(MacroAssembler* masm, int argc) {
   4982   __ EnterApiExitFrame(argc);
   4983   if (__ emit_debug_code()) {
   4984     __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
   4985   }
   4986 }
   4987 
   4988 
    4989 // Calls an API function. Allocates a HandleScope, extracts the returned
    4990 // value from the handle and propagates exceptions. Clobbers ebx, edi and
    4991 // caller-save registers. Restores the context. On return, removes
    4992 // stack_space * kPointerSize bytes from the stack (GCed).
   4993 static void CallApiFunctionAndReturn(MacroAssembler* masm,
   4994                                      Register function_address,
   4995                                      ExternalReference thunk_ref,
   4996                                      Operand thunk_last_arg, int stack_space,
   4997                                      Operand* stack_space_operand,
   4998                                      Operand return_value_operand,
   4999                                      Operand* context_restore_operand) {
   5000   Isolate* isolate = masm->isolate();
   5001 
   5002   ExternalReference next_address =
   5003       ExternalReference::handle_scope_next_address(isolate);
   5004   ExternalReference limit_address =
   5005       ExternalReference::handle_scope_limit_address(isolate);
   5006   ExternalReference level_address =
   5007       ExternalReference::handle_scope_level_address(isolate);
   5008 
   5009   DCHECK(edx.is(function_address));
   5010   // Allocate HandleScope in callee-save registers.
   5011   __ mov(ebx, Operand::StaticVariable(next_address));
   5012   __ mov(edi, Operand::StaticVariable(limit_address));
   5013   __ add(Operand::StaticVariable(level_address), Immediate(1));
   5014 
   5015   if (FLAG_log_timer_events) {
   5016     FrameScope frame(masm, StackFrame::MANUAL);
   5017     __ PushSafepointRegisters();
   5018     __ PrepareCallCFunction(1, eax);
   5019     __ mov(Operand(esp, 0),
   5020            Immediate(ExternalReference::isolate_address(isolate)));
   5021     __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
   5022                      1);
   5023     __ PopSafepointRegisters();
   5024   }
   5025 
   5026 
   5027   Label profiler_disabled;
   5028   Label end_profiler_check;
   5029   __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
   5030   __ cmpb(Operand(eax, 0), 0);
   5031   __ j(zero, &profiler_disabled);
   5032 
    5033   // The additional parameter is the address of the actual callback function.
   5034   __ mov(thunk_last_arg, function_address);
   5035   // Call the api function.
   5036   __ mov(eax, Immediate(thunk_ref));
   5037   __ call(eax);
   5038   __ jmp(&end_profiler_check);
   5039 
   5040   __ bind(&profiler_disabled);
   5041   // Call the api function.
   5042   __ call(function_address);
   5043   __ bind(&end_profiler_check);
   5044 
   5045   if (FLAG_log_timer_events) {
   5046     FrameScope frame(masm, StackFrame::MANUAL);
   5047     __ PushSafepointRegisters();
   5048     __ PrepareCallCFunction(1, eax);
   5049     __ mov(Operand(esp, 0),
   5050            Immediate(ExternalReference::isolate_address(isolate)));
   5051     __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
   5052                      1);
   5053     __ PopSafepointRegisters();
   5054   }
   5055 
   5056   Label prologue;
   5057   // Load the value from ReturnValue
   5058   __ mov(eax, return_value_operand);
   5059 
   5060   Label promote_scheduled_exception;
   5061   Label delete_allocated_handles;
   5062   Label leave_exit_frame;
   5063 
   5064   __ bind(&prologue);
   5065   // No more valid handles (the result handle was the last one). Restore
   5066   // previous handle scope.
   5067   __ mov(Operand::StaticVariable(next_address), ebx);
   5068   __ sub(Operand::StaticVariable(level_address), Immediate(1));
   5069   __ Assert(above_equal, kInvalidHandleScopeLevel);
   5070   __ cmp(edi, Operand::StaticVariable(limit_address));
   5071   __ j(not_equal, &delete_allocated_handles);
   5072 
   5073   // Leave the API exit frame.
   5074   __ bind(&leave_exit_frame);
   5075   bool restore_context = context_restore_operand != NULL;
   5076   if (restore_context) {
   5077     __ mov(esi, *context_restore_operand);
   5078   }
   5079   if (stack_space_operand != nullptr) {
   5080     __ mov(ebx, *stack_space_operand);
   5081   }
   5082   __ LeaveApiExitFrame(!restore_context);
   5083 
   5084   // Check if the function scheduled an exception.
   5085   ExternalReference scheduled_exception_address =
   5086       ExternalReference::scheduled_exception_address(isolate);
   5087   __ cmp(Operand::StaticVariable(scheduled_exception_address),
   5088          Immediate(isolate->factory()->the_hole_value()));
   5089   __ j(not_equal, &promote_scheduled_exception);
   5090 
   5091 #if DEBUG
   5092   // Check if the function returned a valid JavaScript value.
   5093   Label ok;
   5094   Register return_value = eax;
   5095   Register map = ecx;
   5096 
   5097   __ JumpIfSmi(return_value, &ok, Label::kNear);
   5098   __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
   5099 
   5100   __ CmpInstanceType(map, LAST_NAME_TYPE);
   5101   __ j(below_equal, &ok, Label::kNear);
   5102 
   5103   __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
   5104   __ j(above_equal, &ok, Label::kNear);
   5105 
   5106   __ cmp(map, isolate->factory()->heap_number_map());
   5107   __ j(equal, &ok, Label::kNear);
   5108 
   5109   __ cmp(return_value, isolate->factory()->undefined_value());
   5110   __ j(equal, &ok, Label::kNear);
   5111 
   5112   __ cmp(return_value, isolate->factory()->true_value());
   5113   __ j(equal, &ok, Label::kNear);
   5114 
   5115   __ cmp(return_value, isolate->factory()->false_value());
   5116   __ j(equal, &ok, Label::kNear);
   5117 
   5118   __ cmp(return_value, isolate->factory()->null_value());
   5119   __ j(equal, &ok, Label::kNear);
   5120 
   5121   __ Abort(kAPICallReturnedInvalidObject);
   5122 
   5123   __ bind(&ok);
   5124 #endif
   5125 
   5126   if (stack_space_operand != nullptr) {
   5127     DCHECK_EQ(0, stack_space);
   5128     __ pop(ecx);
   5129     __ add(esp, ebx);
   5130     __ jmp(ecx);
   5131   } else {
   5132     __ ret(stack_space * kPointerSize);
   5133   }
   5134 
   5135   // Re-throw by promoting a scheduled exception.
   5136   __ bind(&promote_scheduled_exception);
   5137   __ TailCallRuntime(Runtime::kPromoteScheduledException);
   5138 
   5139   // HandleScope limit has changed. Delete allocated extensions.
   5140   ExternalReference delete_extensions =
   5141       ExternalReference::delete_handle_scope_extensions(isolate);
   5142   __ bind(&delete_allocated_handles);
   5143   __ mov(Operand::StaticVariable(limit_address), edi);
   5144   __ mov(edi, eax);
   5145   __ mov(Operand(esp, 0),
   5146          Immediate(ExternalReference::isolate_address(isolate)));
   5147   __ mov(eax, Immediate(delete_extensions));
   5148   __ call(eax);
   5149   __ mov(eax, edi);
   5150   __ jmp(&leave_exit_frame);
   5151 }
   5152 
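         // For reference, the HandleScope bookkeeping above mirrors what the
         // C++ HandleScope constructor and destructor would do, roughly:
         //
         //   prev_next = isolate->handle_scope_next;    // kept in ebx
         //   prev_limit = isolate->handle_scope_limit;  // kept in edi
         //   isolate->handle_scope_level++;
         //   ... call the API function ...
         //   // restore next, drop level; if the callback grew the scope
         //   // (limit changed), call DeleteExtensions.
         //
         // The field names here are illustrative; the real accesses go
         // through the handle_scope_*_address external references.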
   5153 
   5154 static void CallApiFunctionStubHelper(MacroAssembler* masm,
   5155                                       const ParameterCount& argc,
   5156                                       bool return_first_arg,
   5157                                       bool call_data_undefined) {
   5158   // ----------- S t a t e -------------
   5159   //  -- edi                 : callee
   5160   //  -- ebx                 : call_data
   5161   //  -- ecx                 : holder
   5162   //  -- edx                 : api_function_address
   5163   //  -- esi                 : context
   5164   //  -- eax                 : number of arguments if argc is a register
   5165   //  --
   5166   //  -- esp[0]              : return address
   5167   //  -- esp[4]              : last argument
   5168   //  -- ...
   5169   //  -- esp[argc * 4]       : first argument
   5170   //  -- esp[(argc + 1) * 4] : receiver
   5171   // -----------------------------------
   5172 
   5173   Register callee = edi;
   5174   Register call_data = ebx;
   5175   Register holder = ecx;
   5176   Register api_function_address = edx;
   5177   Register context = esi;
   5178   Register return_address = eax;
   5179 
   5180   typedef FunctionCallbackArguments FCA;
   5181 
   5182   STATIC_ASSERT(FCA::kContextSaveIndex == 6);
   5183   STATIC_ASSERT(FCA::kCalleeIndex == 5);
   5184   STATIC_ASSERT(FCA::kDataIndex == 4);
   5185   STATIC_ASSERT(FCA::kReturnValueOffset == 3);
   5186   STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
   5187   STATIC_ASSERT(FCA::kIsolateIndex == 1);
   5188   STATIC_ASSERT(FCA::kHolderIndex == 0);
   5189   STATIC_ASSERT(FCA::kArgsLength == 7);
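  // With these indices pinned down, the pushes below build the
  // FunctionCallbackArguments frame from kContextSaveIndex down to
  // kHolderIndex.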

  DCHECK(argc.is_immediate() || eax.is(argc.reg()));

  if (argc.is_immediate()) {
    __ pop(return_address);
    // context save.
    __ push(context);
  } else {
    // pop return address and save context
    __ xchg(context, Operand(esp, 0));
    return_address = context;
  }

  // callee
  __ push(callee);

  // call data
  __ push(call_data);

  Register scratch = call_data;
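  // Both return value slots start out as undefined. When the call data is
  // known to be undefined, the call_data register already holds undefined,
  // so pushing it twice is cheaper than materializing the immediate.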
  if (!call_data_undefined) {
    // return value
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
    // return value default
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
  } else {
    // return value
    __ push(scratch);
    // return value default
    __ push(scratch);
  }
  // isolate
  __ push(Immediate(reinterpret_cast<int>(masm->isolate())));
  // holder
  __ push(holder);

  __ mov(scratch, esp);
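  // scratch now points at the holder slot (FCA::kHolderIndex == 0), the base
  // of the FunctionCallbackArguments frame; it becomes
  // FunctionCallbackInfo::implicit_args_ below.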

  // push return address
  __ push(return_address);

  // load context from callee
  __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));

  // The API function gets a reference to the v8::Arguments. If the CPU
  // profiler is enabled, a wrapper function is called instead, and the
  // address of the real callback must be passed as an additional
  // parameter, so always allocate space for it.
  const int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space, since it
  // is not controlled by the GC: four words, one each for implicit_args_,
  // values_, length_ and is_construct_call_.
  const int kApiStackSpace = 4;

  PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ mov(ApiParameterOperand(2), scratch);
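  // Both branches below compute values_ as implicit_args_ +
  // (argc + FCA::kArgsLength - 1) * kPointerSize, i.e. the address of the
  // first JS argument; the API indexes the arguments downwards from there.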
  if (argc.is_immediate()) {
    __ add(scratch,
           Immediate((argc.immediate() + FCA::kArgsLength - 1) * kPointerSize));
    // FunctionCallbackInfo::values_.
    __ mov(ApiParameterOperand(3), scratch);
    // FunctionCallbackInfo::length_.
    __ Move(ApiParameterOperand(4), Immediate(argc.immediate()));
    // FunctionCallbackInfo::is_construct_call_.
    __ Move(ApiParameterOperand(5), Immediate(0));
  } else {
    __ lea(scratch, Operand(scratch, argc.reg(), times_pointer_size,
                            (FCA::kArgsLength - 1) * kPointerSize));
    // FunctionCallbackInfo::values_.
    __ mov(ApiParameterOperand(3), scratch);
    // FunctionCallbackInfo::length_.
    __ mov(ApiParameterOperand(4), argc.reg());
    // FunctionCallbackInfo::is_construct_call_.
    __ lea(argc.reg(), Operand(argc.reg(), times_pointer_size,
                               (FCA::kArgsLength + 1) * kPointerSize));
    __ mov(ApiParameterOperand(5), argc.reg());
  }
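  // In the register case, the is_construct_call_ slot receives
  // (argc + FCA::kArgsLength + 1) * kPointerSize, which doubles as the
  // number of bytes to drop from the stack on return (it is read back
  // through stack_space_operand below).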

  // v8::InvocationCallback's argument.
  __ lea(scratch, ApiParameterOperand(2));
  __ mov(ApiParameterOperand(0), scratch);

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  Operand context_restore_operand(ebp,
                                  (2 + FCA::kContextSaveIndex) * kPointerSize);
  // Stores return the first JS argument.
  int return_value_offset = 0;
  if (return_first_arg) {
    return_value_offset = 2 + FCA::kArgsLength;
  } else {
    return_value_offset = 2 + FCA::kReturnValueOffset;
  }
  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
  int stack_space = 0;
  Operand is_construct_call_operand = ApiParameterOperand(5);
  Operand* stack_space_operand = &is_construct_call_operand;
  if (argc.is_immediate()) {
    stack_space = argc.immediate() + FCA::kArgsLength + 1;
    stack_space_operand = nullptr;
  }
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(1), stack_space,
                           stack_space_operand, return_value_operand,
                           &context_restore_operand);
}


void CallApiFunctionStub::Generate(MacroAssembler* masm) {
  bool call_data_undefined = this->call_data_undefined();
  CallApiFunctionStubHelper(masm, ParameterCount(eax), false,
                            call_data_undefined);
}


void CallApiAccessorStub::Generate(MacroAssembler* masm) {
  bool is_store = this->is_store();
  int argc = this->argc();
  bool call_data_undefined = this->call_data_undefined();
  CallApiFunctionStubHelper(masm, ParameterCount(argc), is_store,
                            call_data_undefined);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- esp[0]                  : return address
  //  -- esp[4]                  : name
  //  -- esp[8 - kArgsLength*4]  : PropertyCallbackArguments object
  //  -- ...
  //  -- edx                     : api_function_address
  // -----------------------------------
  DCHECK(edx.is(ApiGetterDescriptor::function_address()));

  // Array for v8::Arguments::values_, a handle for the name, and a pointer
  // to the values (the GC treats it as a smi).
  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2;
  // Allocate space for the optional callback address parameter in case the
  // CPU profiler is active.
  const int kApiArgc = 2 + 1;

  Register api_function_address = edx;
  Register scratch = ebx;

  // load address of name
  __ lea(scratch, Operand(esp, 1 * kPointerSize));

  PrepareCallApiFunction(masm, kApiArgc);
  __ mov(ApiParameterOperand(0), scratch);  // name.
  __ add(scratch, Immediate(kPointerSize));
  __ mov(ApiParameterOperand(1), scratch);  // arguments pointer.
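  // ApiParameterOperand(0) now holds the address of the name handle and
  // ApiParameterOperand(1) the address of the PropertyCallbackArguments
  // array directly above it; together they form the v8::PropertyCallbackInfo
  // passed to the getter.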

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(2), kStackSpace, nullptr,
                           Operand(ebp, 7 * kPointerSize), nullptr);
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X87