// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/code-stubs.h"
#include "src/api-arguments.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/ia32/code-stubs-ia32.h"
#include "src/ia32/frames-ia32.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
  __ pop(ecx);  // Pop the return address.
  // Store the constructor in the slot just past the arguments.
  __ mov(MemOperand(esp, eax, times_4, 0), edi);
  __ push(edi);  // Constructor function.
  __ push(ebx);  // AllocationSite or undefined.
  __ push(ecx);  // Re-push the return address.
  __ add(eax, Immediate(3));  // The runtime sees three extra arguments.
  __ TailCallRuntime(Runtime::kNewArray);
}

void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
  descriptor->Initialize(eax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

void FastFunctionBindStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
  descriptor->Initialize(eax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           eax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments.
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ ret(0);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles()) {
    __ sub(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(Operand(esp, i * kDoubleSize), reg);
    }
  }
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(reg, Operand(esp, i * kDoubleSize));
    }
    __ add(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
  }
  __ popad();
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. The input must be
  // either a smi or a heap number object (fp value) and must be held in
  // the register `number`. Returns the operand as a floating point number
  // on the FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Tests if the operands are smis or heap number objects (fp).
  // Requirements: operand_1 in eax, operand_2 in edx; falls through on
  // float operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are.  Jump to label not_numbers if
  // either operand is not a number.  Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);
};
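
// For readers unfamiliar with V8's ia32 value tagging, a minimal,
// illustrative sketch (ours, not shipped by V8) of what the smi handling in
// the helpers above amounts to: kSmiTag == 0 and kSmiTagSize == 1 on ia32,
// so a smi carries its payload in the upper 31 bits and untagging is one
// arithmetic shift, while any other number is a boxed IEEE-754 double
// (a HeapNumber).
#if 0
static inline bool IsSmiValue(int32_t tagged) {
  return (tagged & 1) == 0;  // kSmiTagMask == 1, kSmiTag == 0
}

static inline int32_t SmiUntagValue(int32_t tagged) {
  return tagged >> 1;  // arithmetic shift drops the tag bit
}
#endif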


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in the lower 32 bits of the mantissa.
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);
  }
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    if (stash_exponent_copy) {
      // Already a copy of the exponent on the stack, overwrite it.
      STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
      __ sub(esp, Immediate(kDoubleSize / 2));
    } else {
      // Reserve space for 64 bit answer.
      __ sub(esp, Immediate(kDoubleSize));  // Nolint.
    }
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result.
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done_no_stash);
  } else {
    // Result must be extracted from the shifted 32-bit mantissa.
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    if (stash_exponent_copy) {
      __ mov(result_reg, MemOperand(esp, 0));
    } else {
      __ mov(result_reg, exponent_operand);
    }
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    __ shrd_cl(scratch1, result_reg);
    __ shr_cl(result_reg);
    __ test(ecx, Immediate(32));
    __ cmov(not_equal, scratch1, result_reg);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  __ cmov(greater, result_reg, scratch1);

  // Restore registers.
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}
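
// A hedged, illustrative C++ sketch (ours; V8 does not ship this helper) of
// the truncating conversion DoubleToIStub performs: split the IEEE-754 bits
// into sign, exponent, and significand, shift the significand into place,
// and apply the sign, mirroring the ecx/scratch1 shifting above.  NaN and
// infinity handling is omitted, as in the stub's fast paths.
#if 0
#include <cstdint>
#include <cstring>

static int32_t TruncateDoubleToInt32(double input) {
  uint64_t bits;
  memcpy(&bits, &input, sizeof bits);  // raw IEEE-754 representation
  int exponent = static_cast<int>((bits >> 52) & 0x7FF) - 1075;
  uint64_t significand =
      (bits & ((uint64_t{1} << 52) - 1)) | (uint64_t{1} << 52);
  uint64_t magnitude;
  if (exponent >= 32) {
    magnitude = 0;  // every significant bit shifted past bit 31
  } else if (exponent >= 0) {
    magnitude = significand << exponent;  // truncated to 32 bits below
  } else if (exponent > -64) {
    magnitude = significand >> -exponent;  // truncate toward zero
  } else {
    magnitude = 0;  // |input| < 1
  }
  int32_t result = static_cast<int32_t>(magnitude);
  return (bits >> 63) ? -result : result;  // apply the sign
}
#endif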


void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(equal, &load_float_eax, Label::kNear);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm0, edx);
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm1, eax);
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}


void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are numbers (smi or heap number); if either is
  // not, jump to non_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // Argument in edx is not a number -> NaN.

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // Argument in eax is not a number -> NaN.

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  Factory* factory = isolate()->factory();
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(eax));
  const Register base = edx;
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    __ mov(base, Operand(esp, 2 * kPointerSize));
    __ mov(exponent, Operand(esp, 1 * kPointerSize));

    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ cmp(FieldOperand(base, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiUntag(base);
    __ Cvtsi2sd(double_base, base);

    __ bind(&unpack_exponent);
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ cmp(FieldOperand(exponent, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cvttsd2si(exponent, Operand(double_exponent));
    __ cmp(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type() == ON_STACK) {
      // Detect square root case.  Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead.  We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
      __ mov(scratch, Immediate(0x3F000000u));
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base.  Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base.  Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), double_exponent);
    __ fld_d(Operand(esp, 0));  // E
    __ movsd(Operand(esp, 0), double_base);
    __ fld_d(Operand(esp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0).
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B);
    // see the sketch following this function.
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);    // 2^X
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ test_b(eax,
              Immediate(0x5F));  // We check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent, by squaring.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_scratch2 with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ neg(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shr(scratch, 1);
  // Above condition means CF==0 && ZF==0.  This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shr(scratch, 1);
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // scratch has the original value of the exponent; if the exponent is
  // negative, return 1/result.
  __ test(exponent, exponent);
  __ j(positive, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // exponent is a smi.  We reset it with the exponent value before bailing
  // out.
  __ j(not_equal, &done);
  __ Cvtsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as a heap number in exponent.
    __ bind(&done);
    __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
    __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), double_result);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(4, scratch);
      __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
      __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 4);
    }
    // Return value is in st(0) on ia32.
    // Store it into the (fixed) result register.
    __ sub(esp, Immediate(kDoubleSize));
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));

    __ bind(&done);
    __ ret(0);
  }
}
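
// Illustrative, non-authoritative C++ sketch (ours, not V8 API) of the two
// strategies MathPowStub uses above: the FPU path evaluates
// B^E = 2^(E * log2(B)), and the integer path is exponentiation by squaring
// with a final reciprocal for negative exponents.
#if 0
#include <cmath>

static double PowViaLog2(double base, double exp) {
  // Mirrors the fyl2x/f2xm1/fscale sequence; no exception handling here.
  return std::exp2(exp * std::log2(base));
}

static double PowIntExponent(double base, int exponent) {
  unsigned n = exponent < 0 ? -static_cast<unsigned>(exponent) : exponent;
  double result = 1.0;
  for (double square = base; n != 0; n >>= 1) {
    if (n & 1) result *= square;  // consume the lowest exponent bit
    square *= square;
  }
  // As the comments above note, 1/(x^n) may differ from (1/x)^n for
  // subnormal results, which is why the stub re-checks for zero.
  return exponent < 0 ? 1.0 / result : result;
}
#endif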


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // With careful management, we won't have to save slot and vector on
  // the stack. Simply handle the possibly missing case first.
  // TODO(mvstanton): this code can be more efficient.
  __ cmp(FieldOperand(receiver, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(isolate()->factory()->the_hole_value()));
  __ j(equal, &miss);
  __ TryGetFunctionPrototype(receiver, eax, ebx, &miss);
  __ ret(0);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}


void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = edi;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  Register result = eax;
  DCHECK(!result.is(scratch));
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Jump straight to the runtime if native RegExp support was not selected
  // at compile time, or if entering generated regexp code is disabled by a
  // runtime flag.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
  Factory* factory = isolate()->factory();

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, ebx);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size,
  // or          number_of_captures * 2 <= offsets vector size - 2.
  // Multiplying by 2 comes for free since edx is smi-tagged.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Move(edi, Immediate(0));
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);
  __ mov(edx, eax);  // Make a copy of the original subject string.

  // eax: subject string
  // edx: subject string
  // ecx: RegExp data (FixedArray)
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte?  If yes, go to (9).
  // (2) Sequential one byte?  If yes, go to (5).
  // (3) Sequential or cons?  If not, go to (6).
  // (4) Cons string.  If the string is flat, replace subject with first string
  //     and go to (1). Otherwise bail out to runtime.
  // (5) One byte sequential.  Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (6) Long external string?  If not, go to (10).
  // (7) External string.  Make it, offset-wise, look like a sequential string.
  // (8) Is the external string one byte?  If yes, go to (5).
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string?  If yes, bail out to runtime.
  // (11) Sliced string.  Replace subject with parent. Go to (1).

  Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */,
      external_string /* 7 */, check_underlying /* 1 */,
      not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */;

  __ bind(&check_underlying);
  // (1) Sequential two byte?  If yes, go to (9).
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  __ and_(ebx, kIsNotStringMask |
               kStringRepresentationMask |
               kStringEncodingMask |
               kShortExternalStringMask);
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte?  If yes, go to (5).
  // Any other sequential string must be one byte.
  __ and_(ebx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (5).

  // (3) Sequential or cons?  If not, go to (6).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmp(ebx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (6).

  // (4) Cons string.  Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
  __ jmp(&check_underlying);

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (5) One byte sequential.  Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // Load previous index and check range before edx is overwritten.  We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset));
  __ Move(ecx, Immediate(1));  // Type is one byte.

  // (E) Carry on.  String handling is done.
  __ bind(&check_code);
  // edx: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(edx, &runtime);

  // eax: subject string
  // ebx: previous index (smi)
  // edx: code
  // ecx: encoding of subject string (1 if one_byte, 0 if two_byte)
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  __ EnterApiExitFrame(kRegExpExecuteArguments);

  // Argument 9: Pass current isolate address.
  __ mov(Operand(esp, 8 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 6 * kPointerSize), esi);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global.  This does not affect non-global regexps.
  __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));

  // Argument 5: static offsets vector buffer.
  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector(
             isolate())));

  // Argument 2: Previous index.
  __ SmiUntag(ebx);
  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use ebp, which points exactly to one pointer size below the previous esp.
  // (Because creating a new stack frame pushes the previous ebp onto the stack
  // and thereby moves up esp by one kPointerSize.)
  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), esi);

  // esi: original subject string
  // eax: underlying subject string
  // ebx: previous index
  // ecx: encoding of subject string (1 if one_byte, 0 if two_byte)
  // edx: code
  // Argument 4: End of string data
  // Argument 3: Start of string data
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ mov(esi, FieldOperand(esi, String::kLengthOffset));
  __ add(esi, edi);  // Calculate input end wrt offset.
  __ SmiUntag(edi);
  __ add(ebx, edi);  // Calculate input start wrt offset.

  // ebx: start index of the input string
  // esi: end index of the input string
  Label setup_two_byte, setup_rest;
  __ test(ecx, ecx);
  __ j(zero, &setup_two_byte, Label::kNear);
  __ SmiUntag(esi);
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
  __ jmp(&setup_rest, Label::kNear);

  __ bind(&setup_two_byte);
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);  // esi is a smi (already multiplied by 2).
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.

  __ bind(&setup_rest);

  // Locate the code entry and call it.
  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(edx);

  // Drop arguments and come back to JS mode.
  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  __ cmp(eax, 1);
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success);
  Label failure;
  __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
  __ j(equal, &failure);
  __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
  // If not exception it can only be retry. Handle that in the runtime system.
  __ j(not_equal, &runtime);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code, but
  // the exception has not been created yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(eax, Operand::StaticVariable(pending_exception));
  __ cmp(edx, eax);
  __ j(equal, &runtime);

  // For exception, throw the exception again.
  __ TailCallRuntime(Runtime::kRegExpExecReThrow);

  __ bind(&failure);
  // For failure to match, return null.
  __ mov(eax, factory->null_value());
  __ ret(4 * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(edx, Immediate(2));  // edx was a smi, so this adds 1 untagged.

  // edx: Number of capture registers
  // Load last_match_info which is still known to be a fast case JSArray.
  // Check that the fourth object is a JSArray object.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(eax, factory->fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information.
  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ SmiUntag(eax);
  __ sub(eax, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmp(edx, eax);
  __ j(greater, &runtime);

  // ebx: last_match_info backing store (FixedArray)
  // edx: number of capture registers
  // Store the capture count.
  __ SmiTag(edx);  // Number of capture registers to smi.
  __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
  __ SmiUntag(edx);  // Number of capture registers back from smi.
  // Store last subject and last input.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(ecx, eax);
  __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
  __ RecordWriteField(ebx,
                      RegExpImpl::kLastSubjectOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs);
  __ mov(eax, ecx);
  __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
  __ RecordWriteField(ebx,
                      RegExpImpl::kLastInputOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(isolate());
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  // ebx: last_match_info backing store (FixedArray)
  // ecx: offsets vector
  // edx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ sub(edx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer.
  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
  __ SmiTag(edi);
  // Store the smi value in the last match info.
  __ mov(FieldOperand(ebx,
                      edx,
                      times_pointer_size,
                      RegExpImpl::kFirstCaptureOffset),
         edi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec);

  // Deferred code for string handling.
  // (6) Long external string?  If not, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (7) External string.  Short external strings have been ruled out.
  __ bind(&external_string);
  // Reload instance type.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test_b(ebx, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8) Is the external string one byte?  If yes, go to (5).
  __ test_b(ebx, Immediate(kStringEncodingMask));
  __ j(not_zero, &seq_one_byte_string);  // Go to (5).

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  __ bind(&seq_two_byte_string);
  // Load previous index and check range before edx is overwritten.  We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
  __ Move(ecx, Immediate(0));  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string?  If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
  __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
  __ j(not_zero, &runtime);

  // (11) Sliced string.  Replace subject with parent.  Go to (1).
  // Load offset into edi and replace subject string with parent.
  __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
  __ jmp(&check_underlying);  // Go to (1).
#endif  // V8_INTERPRETED_REGEXP
}

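// An illustrative C++ sketch (ours, not V8 API) of the instance-type
// dispatch that steps (1)-(11) above implement with masked compares: the
// masks isolate string-ness, representation (seq/cons/external/sliced),
// encoding, and the "short external" bit of a string's instance type.
#if 0
enum class SubjectKind { kSeqTwoByte, kSeqOneByte, kConsOrSliced,
                         kExternal, kBailToRuntime };

static SubjectKind ClassifySubject(uint32_t type) {
  const uint32_t relevant = type & (kIsNotStringMask |
                                    kStringRepresentationMask |
                                    kStringEncodingMask |
                                    kShortExternalStringMask);
  if (relevant == 0) return SubjectKind::kSeqTwoByte;          // step (1)
  if ((relevant & ~kStringEncodingMask) == 0)
    return SubjectKind::kSeqOneByte;                           // step (2)
  if (relevant & (kIsNotStringMask | kShortExternalStringMask))
    return SubjectKind::kBailToRuntime;                        // step (10)
  if ((type & kStringRepresentationMask) == kExternalStringTag)
    return SubjectKind::kExternal;                             // steps (6)-(8)
  return SubjectKind::kConsOrSliced;                           // steps (4), (11)
}
#endif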


static int NegativeComparisonResult(Condition cc) {
  DCHECK(cc != equal);
  DCHECK((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long
  // as hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}
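
// Equivalent predicate, as a hedged C++ sketch (ours): because both
// kInternalizedTag and kStringTag are zero, an instance type denotes an
// internalized string exactly when the "not a string" and "not internalized"
// mask bits are both clear, which is what the single test above checks.
#if 0
static bool IsInternalizedStringType(uint32_t instance_type) {
  return (instance_type & (kIsNotStringMask | kIsNotInternalizedMask)) == 0;
}
#endif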


void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Label runtime_call, check_unequal_objects;
  Condition cc = GetCondition();

  Label miss;
  CheckInputType(masm, edx, left(), &miss);
  CheckInputType(masm, eax, right(), &miss);

  // Compare two smis.
  Label non_smi, smi_done;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
  __ sub(edx, eax);  // Return the result of the subtraction.
  __ j(no_overflow, &smi_done, Label::kNear);
  __ not_(edx);  // Correct sign in case of overflow. edx is never 0 here.
  __ bind(&smi_done);
  __ mov(eax, edx);
  __ ret(0);
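
  // Why flipping the bits fixes the sign, as a hedged sketch (ours; the
  // builtin below is a GCC/Clang intrinsic, not something V8 relies on):
  // only the sign of the result matters, so when the subtraction overflows
  // and the sign bit is therefore wrong, ~diff restores the correct sign
  // and remains non-zero.
#if 0
  static int32_t CompareSmis(int32_t x, int32_t y) {
    int32_t diff;
    if (__builtin_sub_overflow(x, y, &diff)) diff = ~diff;
    return diff;  // < 0, == 0, > 0 exactly as for x vs. y
  }
#endif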
   1124   __ bind(&non_smi);
   1125 
   1126   // NOTICE! This code is only reached after a smi-fast-case check, so
   1127   // it is certain that at least one operand isn't a smi.
   1128 
   1129   // Identical objects can be compared fast, but there are some tricky cases
   1130   // for NaN and undefined.
   1131   Label generic_heap_number_comparison;
   1132   {
   1133     Label not_identical;
   1134     __ cmp(eax, edx);
   1135     __ j(not_equal, &not_identical);
   1136 
   1137     if (cc != equal) {
   1138       // Check for undefined.  undefined OP undefined is false even though
   1139       // undefined == undefined.
   1140       __ cmp(edx, isolate()->factory()->undefined_value());
   1141       Label check_for_nan;
   1142       __ j(not_equal, &check_for_nan, Label::kNear);
   1143       __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
   1144       __ ret(0);
   1145       __ bind(&check_for_nan);
   1146     }
   1147 
   1148     // Test for NaN. Compare heap numbers in a general way,
   1149     // to handle NaNs correctly.
   1150     __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
   1151            Immediate(isolate()->factory()->heap_number_map()));
   1152     __ j(equal, &generic_heap_number_comparison, Label::kNear);
   1153     if (cc != equal) {
   1154       __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
   1155       __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
   1156       // Call runtime on identical JSObjects.  Otherwise return equal.
   1157       __ cmpb(ecx, Immediate(FIRST_JS_RECEIVER_TYPE));
   1158       __ j(above_equal, &runtime_call, Label::kFar);
   1159       // Call runtime on identical symbols since we need to throw a TypeError.
   1160       __ cmpb(ecx, Immediate(SYMBOL_TYPE));
   1161       __ j(equal, &runtime_call, Label::kFar);
   1162       // Call runtime on identical SIMD values since we must throw a TypeError.
   1163       __ cmpb(ecx, Immediate(SIMD128_VALUE_TYPE));
   1164       __ j(equal, &runtime_call, Label::kFar);
   1165     }
   1166     __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   1167     __ ret(0);
   1168 
   1169 
   1170     __ bind(&not_identical);
   1171   }
   1172 
   1173   // Strict equality can quickly decide whether objects are equal.
   1174   // Non-strict object equality is slower, so it is handled later in the stub.
   1175   if (cc == equal && strict()) {
   1176     Label slow;  // Fallthrough label.
   1177     Label not_smis;
   1178     // If we're doing a strict equality comparison, we don't have to do
   1179     // type conversion, so we generate code to do fast comparison for objects
   1180     // and oddballs. Non-smi numbers and strings still go through the usual
   1181     // slow-case code.
   1182     // If either is a Smi (we know that not both are), then they can only
   1183     // be equal if the other is a HeapNumber. If so, use the slow case.
   1184     STATIC_ASSERT(kSmiTag == 0);
   1185     DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
   1186     __ mov(ecx, Immediate(kSmiTagMask));
   1187     __ and_(ecx, eax);
   1188     __ test(ecx, edx);
   1189     __ j(not_zero, &not_smis, Label::kNear);
   1190     // One operand is a smi.
   1191 
   1192     // Check whether the non-smi is a heap number.
   1193     STATIC_ASSERT(kSmiTagMask == 1);
   1194     // ecx still holds eax & kSmiTag, which is either zero or one.
   1195     __ sub(ecx, Immediate(0x01));
   1196     __ mov(ebx, edx);
   1197     __ xor_(ebx, eax);
   1198     __ and_(ebx, ecx);  // ebx holds either 0 or eax ^ edx.
   1199     __ xor_(ebx, eax);
   1200     // if eax was smi, ebx is now edx, else eax.
   1201 
   1202     // Check if the non-smi operand is a heap number.
   1203     __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
   1204            Immediate(isolate()->factory()->heap_number_map()));
   1205     // If heap number, handle it in the slow case.
   1206     __ j(equal, &slow, Label::kNear);
   1207     // Return non-equal (ebx is not zero)
   1208     __ mov(eax, ebx);
   1209     __ ret(0);
   1210 
   1211     __ bind(&not_smis);
   1212     // If either operand is a JSObject or an oddball value, then they are not
    1213     // equal since their pointers are different.
   1214     // There is no test for undetectability in strict equality.
   1215 
   1216     // Get the type of the first operand.
   1217     // If the first object is a JS object, we have done pointer comparison.
   1218     Label first_non_object;
   1219     STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
   1220     __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
   1221     __ j(below, &first_non_object, Label::kNear);
   1222 
   1223     // Return non-zero (eax is not zero)
   1224     Label return_not_equal;
   1225     STATIC_ASSERT(kHeapObjectTag != 0);
   1226     __ bind(&return_not_equal);
   1227     __ ret(0);
   1228 
   1229     __ bind(&first_non_object);
   1230     // Check for oddballs: true, false, null, undefined.
   1231     __ CmpInstanceType(ecx, ODDBALL_TYPE);
   1232     __ j(equal, &return_not_equal);
   1233 
   1234     __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
   1235     __ j(above_equal, &return_not_equal);
   1236 
   1237     // Check for oddballs: true, false, null, undefined.
   1238     __ CmpInstanceType(ecx, ODDBALL_TYPE);
   1239     __ j(equal, &return_not_equal);
   1240 
   1241     // Fall through to the general case.
   1242     __ bind(&slow);
   1243   }
   1244 
   1245   // Generate the number comparison code.
   1246   Label non_number_comparison;
   1247   Label unordered;
   1248   __ bind(&generic_heap_number_comparison);
   1249 
   1250   FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
   1251   __ ucomisd(xmm0, xmm1);
   1252   // Don't base result on EFLAGS when a NaN is involved.
   1253   __ j(parity_even, &unordered, Label::kNear);
   1254 
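           // Materialize the result without branches: start with EQUAL (0),
           // then conditionally overwrite with Smi(1) or Smi(-1). ucomisd sets
           // the flags like an unsigned compare (above: CF and ZF clear;
           // below: CF set).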
   1255   __ mov(eax, 0);  // equal
   1256   __ mov(ecx, Immediate(Smi::FromInt(1)));
   1257   __ cmov(above, eax, ecx);
   1258   __ mov(ecx, Immediate(Smi::FromInt(-1)));
   1259   __ cmov(below, eax, ecx);
   1260   __ ret(0);
   1261 
   1262   // If one of the numbers was NaN, then the result is always false.
   1263   // The cc is never not-equal.
   1264   __ bind(&unordered);
   1265   DCHECK(cc != not_equal);
   1266   if (cc == less || cc == less_equal) {
   1267     __ mov(eax, Immediate(Smi::FromInt(1)));
   1268   } else {
   1269     __ mov(eax, Immediate(Smi::FromInt(-1)));
   1270   }
   1271   __ ret(0);
   1272 
   1273   // The number comparison code did not provide a valid result.
   1274   __ bind(&non_number_comparison);
   1275 
   1276   // Fast negative check for internalized-to-internalized equality.
   1277   Label check_for_strings;
   1278   if (cc == equal) {
   1279     BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
   1280     BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);
   1281 
   1282     // We've already checked for object identity, so if both operands
   1283     // are internalized they aren't equal. Register eax already holds a
   1284     // non-zero value, which indicates not equal, so just return.
   1285     __ ret(0);
   1286   }
   1287 
   1288   __ bind(&check_for_strings);
   1289 
   1290   __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx,
   1291                                            &check_unequal_objects);
   1292 
   1293   // Inline comparison of one-byte strings.
   1294   if (cc == equal) {
   1295     StringHelper::GenerateFlatOneByteStringEquals(masm, edx, eax, ecx, ebx);
   1296   } else {
   1297     StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
   1298                                                     edi);
   1299   }
   1300 #ifdef DEBUG
   1301   __ Abort(kUnexpectedFallThroughFromStringComparison);
   1302 #endif
   1303 
   1304   __ bind(&check_unequal_objects);
   1305   if (cc == equal && !strict()) {
   1306     // Non-strict equality.  Objects are unequal if
   1307     // they are both JSObjects and not undetectable,
   1308     // and their pointers are different.
   1309     Label return_equal, return_unequal, undetectable;
   1310     // At most one is a smi, so we can test for smi by adding the two.
   1311     // A smi plus a heap object has the low bit set, a heap object plus
   1312     // a heap object has the low bit clear.
   1313     STATIC_ASSERT(kSmiTag == 0);
   1314     STATIC_ASSERT(kSmiTagMask == 1);
   1315     __ lea(ecx, Operand(eax, edx, times_1, 0));
   1316     __ test(ecx, Immediate(kSmiTagMask));
   1317     __ j(not_zero, &runtime_call);
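             // e.g. Smi::FromInt(3) is 0b110 (low bit 0) and a tagged heap
             // pointer ends in 1, so their sum ends in 1, while the sum of two
             // tagged pointers ends in 0.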
   1318 
   1319     __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
   1320     __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
   1321 
   1322     __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
   1323               Immediate(1 << Map::kIsUndetectable));
   1324     __ j(not_zero, &undetectable, Label::kNear);
   1325     __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
   1326               Immediate(1 << Map::kIsUndetectable));
   1327     __ j(not_zero, &return_unequal, Label::kNear);
   1328 
   1329     __ CmpInstanceType(ebx, FIRST_JS_RECEIVER_TYPE);
   1330     __ j(below, &runtime_call, Label::kNear);
   1331     __ CmpInstanceType(ecx, FIRST_JS_RECEIVER_TYPE);
   1332     __ j(below, &runtime_call, Label::kNear);
   1333 
   1334     __ bind(&return_unequal);
   1335     // Return non-equal by returning the non-zero object pointer in eax.
    1336     __ ret(0);
   1337 
   1338     __ bind(&undetectable);
   1339     __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
   1340               Immediate(1 << Map::kIsUndetectable));
   1341     __ j(zero, &return_unequal, Label::kNear);
   1342 
   1343     // If both sides are JSReceivers, then the result is false according to
   1344     // the HTML specification, which says that only comparisons with null or
   1345     // undefined are affected by special casing for document.all.
   1346     __ CmpInstanceType(ebx, ODDBALL_TYPE);
   1347     __ j(zero, &return_equal, Label::kNear);
   1348     __ CmpInstanceType(ecx, ODDBALL_TYPE);
   1349     __ j(not_zero, &return_unequal, Label::kNear);
   1350 
   1351     __ bind(&return_equal);
   1352     __ Move(eax, Immediate(EQUAL));
    1353     __ ret(0);
   1354   }
   1355   __ bind(&runtime_call);
   1356 
   1357   if (cc == equal) {
   1358     {
   1359       FrameScope scope(masm, StackFrame::INTERNAL);
   1360       __ Push(edx);
   1361       __ Push(eax);
   1362       __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
   1363     }
   1364     // Turn true into 0 and false into some non-zero value.
   1365     STATIC_ASSERT(EQUAL == 0);
   1366     __ sub(eax, Immediate(isolate()->factory()->true_value()));
   1367     __ Ret();
   1368   } else {
   1369     // Push arguments below the return address.
   1370     __ pop(ecx);
   1371     __ push(edx);
   1372     __ push(eax);
   1373     __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
   1374     __ push(ecx);
   1375     // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
   1376     // tagged as a small integer.
   1377     __ TailCallRuntime(Runtime::kCompare);
   1378   }
   1379 
   1380   __ bind(&miss);
   1381   GenerateMiss(masm);
   1382 }
   1383 
   1384 
   1385 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
   1386   // eax : number of arguments to the construct function
   1387   // ebx : feedback vector
   1388   // edx : slot in feedback vector (Smi)
   1389   // edi : the function to call
   1390 
   1391   {
   1392     FrameScope scope(masm, StackFrame::INTERNAL);
   1393 
   1394     // Number-of-arguments register must be smi-tagged to call out.
   1395     __ SmiTag(eax);
   1396     __ push(eax);
   1397     __ push(edi);
   1398     __ push(edx);
   1399     __ push(ebx);
   1400 
   1401     __ CallStub(stub);
   1402 
   1403     __ pop(ebx);
   1404     __ pop(edx);
   1405     __ pop(edi);
   1406     __ pop(eax);
   1407     __ SmiUntag(eax);
   1408   }
   1409 }
   1410 
   1411 
   1412 static void GenerateRecordCallTarget(MacroAssembler* masm) {
   1413   // Cache the called function in a feedback vector slot.  Cache states
   1414   // are uninitialized, monomorphic (indicated by a JSFunction), and
   1415   // megamorphic.
   1416   // eax : number of arguments to the construct function
   1417   // ebx : feedback vector
   1418   // edx : slot in feedback vector (Smi)
   1419   // edi : the function to call
   1420   Isolate* isolate = masm->isolate();
   1421   Label initialize, done, miss, megamorphic, not_array_function;
   1422   Label done_increment_count, done_initialize_count;
   1423 
   1424   // Load the cache state into ecx.
   1425   __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
   1426                            FixedArray::kHeaderSize));
   1427 
   1428   // A monomorphic cache hit or an already megamorphic state: invoke the
   1429   // function without changing the state.
   1430   // We don't know if ecx is a WeakCell or a Symbol, but it's harmless to read
   1431   // at this position in a symbol (see static asserts in
   1432   // type-feedback-vector.h).
   1433   Label check_allocation_site;
   1434   __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
   1435   __ j(equal, &done_increment_count, Label::kFar);
   1436   __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
   1437   __ j(equal, &done, Label::kFar);
   1438   __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
   1439                  Heap::kWeakCellMapRootIndex);
   1440   __ j(not_equal, &check_allocation_site);
   1441 
   1442   // If the weak cell is cleared, we have a new chance to become monomorphic.
   1443   __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
   1444   __ jmp(&megamorphic);
   1445 
   1446   __ bind(&check_allocation_site);
    1447   // If we came here, check whether the called function is the Array
    1448   // function. If we didn't have a matching function and didn't find the
    1449   // megamorphic sentinel, then the slot holds either some other function
    1450   // or an AllocationSite.
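           // FieldOperand(ecx, 0) reads the map word (HeapObject::kMapOffset is
           // 0), so the check below asks whether the slot holds an
           // AllocationSite.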
   1451   __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
   1452   __ j(not_equal, &miss);
   1453 
    1454   // Make sure the function is the Array() function.
   1455   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
   1456   __ cmp(edi, ecx);
   1457   __ j(not_equal, &megamorphic);
   1458   __ jmp(&done_increment_count, Label::kFar);
   1459 
   1460   __ bind(&miss);
   1461 
    1462   // A monomorphic miss (i.e., the cache is not uninitialized) goes
   1463   // megamorphic.
   1464   __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex);
   1465   __ j(equal, &initialize);
   1466   // MegamorphicSentinel is an immortal immovable object (undefined) so no
   1467   // write-barrier is needed.
   1468   __ bind(&megamorphic);
   1469   __ mov(
   1470       FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
   1471       Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
   1472   __ jmp(&done, Label::kFar);
   1473 
    1474   // An uninitialized cache is patched with a WeakCell holding the function,
    1475   // or with an AllocationSite (tracking ElementsKind) for the Array constructor.
   1476   __ bind(&initialize);
    1477   // Make sure the function is the Array() function.
   1478   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
   1479   __ cmp(edi, ecx);
   1480   __ j(not_equal, &not_array_function);
   1481 
    1482   // The target function is the Array constructor.
    1483   // Create an AllocationSite if we don't already have it, and store it in
    1484   // the slot.
   1485   CreateAllocationSiteStub create_stub(isolate);
   1486   CallStubInRecordCallTarget(masm, &create_stub);
   1487   __ jmp(&done_initialize_count);
   1488 
   1489   __ bind(&not_array_function);
   1490   CreateWeakCellStub weak_cell_stub(isolate);
   1491   CallStubInRecordCallTarget(masm, &weak_cell_stub);
   1492   __ bind(&done_initialize_count);
   1493 
   1494   // Initialize the call counter.
   1495   __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
   1496                       FixedArray::kHeaderSize + kPointerSize),
   1497          Immediate(Smi::FromInt(1)));
   1498   __ jmp(&done);
   1499 
   1500   __ bind(&done_increment_count);
   1501   // Increment the call count for monomorphic function calls.
   1502   __ add(FieldOperand(ebx, edx, times_half_pointer_size,
   1503                       FixedArray::kHeaderSize + kPointerSize),
   1504          Immediate(Smi::FromInt(1)));
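           // The counter is a smi, so adding Smi::FromInt(1) to the tagged word
           // increments the untagged count by exactly one.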
   1505 
   1506   __ bind(&done);
   1507 }
   1508 
   1509 
   1510 void CallConstructStub::Generate(MacroAssembler* masm) {
   1511   // eax : number of arguments
   1512   // ebx : feedback vector
   1513   // edx : slot in feedback vector (Smi, for RecordCallTarget)
   1514   // edi : constructor function
   1515 
   1516   Label non_function;
   1517   // Check that function is not a smi.
   1518   __ JumpIfSmi(edi, &non_function);
   1519   // Check that function is a JSFunction.
   1520   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
   1521   __ j(not_equal, &non_function);
   1522 
   1523   GenerateRecordCallTarget(masm);
   1524 
   1525   Label feedback_register_initialized;
   1526   // Put the AllocationSite from the feedback vector into ebx, or undefined.
   1527   __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
   1528                            FixedArray::kHeaderSize));
   1529   Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
   1530   __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
   1531   __ j(equal, &feedback_register_initialized);
   1532   __ mov(ebx, isolate()->factory()->undefined_value());
   1533   __ bind(&feedback_register_initialized);
   1534 
   1535   __ AssertUndefinedOrAllocationSite(ebx);
   1536 
   1537   // Pass new target to construct stub.
   1538   __ mov(edx, edi);
   1539 
   1540   // Tail call to the function-specific construct stub (still in the caller
   1541   // context at this point).
   1542   __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
   1543   __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
   1544   __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
   1545   __ jmp(ecx);
   1546 
   1547   __ bind(&non_function);
   1548   __ mov(edx, edi);
   1549   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
   1550 }
   1551 
   1552 
   1553 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   1554   // edi - function
   1555   // edx - slot id
   1556   // ebx - vector
   1557   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
   1558   __ cmp(edi, ecx);
   1559   __ j(not_equal, miss);
   1560 
   1561   __ mov(eax, arg_count());
   1562   // Reload ecx.
   1563   __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
   1564                            FixedArray::kHeaderSize));
   1565 
   1566   // Increment the call count for monomorphic function calls.
   1567   __ add(FieldOperand(ebx, edx, times_half_pointer_size,
   1568                       FixedArray::kHeaderSize + kPointerSize),
   1569          Immediate(Smi::FromInt(1)));
   1570 
   1571   __ mov(ebx, ecx);
   1572   __ mov(edx, edi);
   1573   ArrayConstructorStub stub(masm->isolate(), arg_count());
   1574   __ TailCallStub(&stub);
   1575 
   1576   // Unreachable.
   1577 }
   1578 
   1579 
   1580 void CallICStub::Generate(MacroAssembler* masm) {
   1581   // edi - function
   1582   // edx - slot id
   1583   // ebx - vector
   1584   Isolate* isolate = masm->isolate();
   1585   Label extra_checks_or_miss, call, call_function;
   1586   int argc = arg_count();
   1587   ParameterCount actual(argc);
   1588 
   1589   // The checks. First, does edi match the recorded monomorphic target?
   1590   __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
   1591                            FixedArray::kHeaderSize));
   1592 
   1593   // We don't know that we have a weak cell. We might have a private symbol
   1594   // or an AllocationSite, but the memory is safe to examine.
   1595   // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
   1596   // FixedArray.
   1597   // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
   1598   // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
   1599   // computed, meaning that it can't appear to be a pointer. If the low bit is
   1600   // 0, then hash is computed, but the 0 bit prevents the field from appearing
   1601   // to be a pointer.
   1602   STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
   1603   STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
   1604                     WeakCell::kValueOffset &&
   1605                 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
   1606 
   1607   __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
   1608   __ j(not_equal, &extra_checks_or_miss);
   1609 
   1610   // The compare above could have been a SMI/SMI comparison. Guard against this
   1611   // convincing us that we have a monomorphic JSFunction.
   1612   __ JumpIfSmi(edi, &extra_checks_or_miss);
   1613 
   1614   // Increment the call count for monomorphic function calls.
   1615   __ add(FieldOperand(ebx, edx, times_half_pointer_size,
   1616                       FixedArray::kHeaderSize + kPointerSize),
   1617          Immediate(Smi::FromInt(1)));
   1618 
   1619   __ bind(&call_function);
   1620   __ Set(eax, argc);
   1621   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
   1622                                                     tail_call_mode()),
   1623           RelocInfo::CODE_TARGET);
   1624 
   1625   __ bind(&extra_checks_or_miss);
   1626   Label uninitialized, miss, not_allocation_site;
   1627 
   1628   __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
   1629   __ j(equal, &call);
   1630 
   1631   // Check if we have an allocation site.
   1632   __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
   1633                  Heap::kAllocationSiteMapRootIndex);
   1634   __ j(not_equal, &not_allocation_site);
   1635 
   1636   // We have an allocation site.
   1637   HandleArrayCase(masm, &miss);
   1638 
   1639   __ bind(&not_allocation_site);
   1640 
   1641   // The following cases attempt to handle MISS cases without going to the
   1642   // runtime.
   1643   if (FLAG_trace_ic) {
   1644     __ jmp(&miss);
   1645   }
   1646 
   1647   __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate)));
   1648   __ j(equal, &uninitialized);
   1649 
   1650   // We are going megamorphic. If the feedback is a JSFunction, it is fine
   1651   // to handle it here. More complex cases are dealt with in the runtime.
   1652   __ AssertNotSmi(ecx);
   1653   __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx);
   1654   __ j(not_equal, &miss);
   1655   __ mov(
   1656       FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
   1657       Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
   1658 
   1659   __ bind(&call);
   1660   __ Set(eax, argc);
   1661   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
   1662           RelocInfo::CODE_TARGET);
   1663 
   1664   __ bind(&uninitialized);
   1665 
   1666   // We are going monomorphic, provided we actually have a JSFunction.
   1667   __ JumpIfSmi(edi, &miss);
   1668 
    1669   // Go to the miss case if we do not have a function.
   1670   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
   1671   __ j(not_equal, &miss);
   1672 
   1673   // Make sure the function is not the Array() function, which requires special
   1674   // behavior on MISS.
   1675   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
   1676   __ cmp(edi, ecx);
   1677   __ j(equal, &miss);
   1678 
   1679   // Make sure the function belongs to the same native context.
   1680   __ mov(ecx, FieldOperand(edi, JSFunction::kContextOffset));
   1681   __ mov(ecx, ContextOperand(ecx, Context::NATIVE_CONTEXT_INDEX));
   1682   __ cmp(ecx, NativeContextOperand());
   1683   __ j(not_equal, &miss);
   1684 
   1685   // Initialize the call counter.
   1686   __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
   1687                       FixedArray::kHeaderSize + kPointerSize),
   1688          Immediate(Smi::FromInt(1)));
   1689 
   1690   // Store the function. Use a stub since we need a frame for allocation.
   1691   // ebx - vector
   1692   // edx - slot
   1693   // edi - function
   1694   {
   1695     FrameScope scope(masm, StackFrame::INTERNAL);
   1696     CreateWeakCellStub create_stub(isolate);
   1697     __ push(edi);
   1698     __ CallStub(&create_stub);
   1699     __ pop(edi);
   1700   }
   1701 
   1702   __ jmp(&call_function);
   1703 
   1704   // We are here because tracing is on or we encountered a MISS case we can't
   1705   // handle here.
   1706   __ bind(&miss);
   1707   GenerateMiss(masm);
   1708 
   1709   __ jmp(&call);
   1710 
    1711   // Unreachable.
   1712   __ int3();
   1713 }
   1714 
   1715 
   1716 void CallICStub::GenerateMiss(MacroAssembler* masm) {
   1717   FrameScope scope(masm, StackFrame::INTERNAL);
   1718 
   1719   // Push the function and feedback info.
   1720   __ push(edi);
   1721   __ push(ebx);
   1722   __ push(edx);
   1723 
   1724   // Call the entry.
   1725   __ CallRuntime(Runtime::kCallIC_Miss);
   1726 
   1727   // Move result to edi and exit the internal frame.
   1728   __ mov(edi, eax);
   1729 }
   1730 
   1731 
   1732 bool CEntryStub::NeedsImmovableCode() {
   1733   return false;
   1734 }
   1735 
   1736 
   1737 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   1738   CEntryStub::GenerateAheadOfTime(isolate);
   1739   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   1740   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   1741   // It is important that the store buffer overflow stubs are generated first.
   1742   CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
   1743   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
   1744   CreateWeakCellStub::GenerateAheadOfTime(isolate);
   1745   BinaryOpICStub::GenerateAheadOfTime(isolate);
   1746   BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
   1747   StoreFastElementStub::GenerateAheadOfTime(isolate);
   1748   TypeofStub::GenerateAheadOfTime(isolate);
   1749 }
   1750 
   1751 
   1752 void CodeStub::GenerateFPStubs(Isolate* isolate) {
   1753   // Generate if not already in cache.
   1754   CEntryStub(isolate, 1, kSaveFPRegs).GetCode();
   1755   isolate->set_fp_stubs_generated(true);
   1756 }
   1757 
   1758 
   1759 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
   1760   CEntryStub stub(isolate, 1, kDontSaveFPRegs);
   1761   stub.GetCode();
   1762 }
   1763 
   1764 
   1765 void CEntryStub::Generate(MacroAssembler* masm) {
   1766   // eax: number of arguments including receiver
   1767   // ebx: pointer to C function  (C callee-saved)
   1768   // ebp: frame pointer  (restored after C call)
   1769   // esp: stack pointer  (restored after C call)
   1770   // esi: current context (C callee-saved)
   1771   // edi: JS function of the caller (C callee-saved)
   1772   //
   1773   // If argv_in_register():
   1774   // ecx: pointer to the first argument
   1775 
   1776   ProfileEntryHookStub::MaybeCallEntryHook(masm);
   1777 
   1778   // Reserve space on the stack for the three arguments passed to the call. If
   1779   // result size is greater than can be returned in registers, also reserve
   1780   // space for the hidden argument for the result location, and space for the
   1781   // result itself.
   1782   int arg_stack_space = result_size() < 3 ? 3 : 4 + result_size();
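           // That is: 3 slots for argc, argv and the isolate, plus (for large
           // results) one slot for the hidden result-location pointer and
           // result_size() slots for the result itself.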
   1783 
   1784   // Enter the exit frame that transitions from JavaScript to C++.
   1785   if (argv_in_register()) {
   1786     DCHECK(!save_doubles());
   1787     __ EnterApiExitFrame(arg_stack_space);
   1788 
   1789     // Move argc and argv into the correct registers.
   1790     __ mov(esi, ecx);
   1791     __ mov(edi, eax);
   1792   } else {
   1793     __ EnterExitFrame(arg_stack_space, save_doubles());
   1794   }
   1795 
   1796   // ebx: pointer to C function  (C callee-saved)
   1797   // ebp: frame pointer  (restored after C call)
   1798   // esp: stack pointer  (restored after C call)
   1799   // edi: number of arguments including receiver  (C callee-saved)
   1800   // esi: pointer to the first argument (C callee-saved)
   1801 
   1802   // Result returned in eax, or eax+edx if result size is 2.
   1803 
   1804   // Check stack alignment.
   1805   if (FLAG_debug_code) {
   1806     __ CheckStackAlignment();
   1807   }
   1808   // Call C function.
   1809   if (result_size() <= 2) {
   1810     __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
   1811     __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
   1812     __ mov(Operand(esp, 2 * kPointerSize),
   1813            Immediate(ExternalReference::isolate_address(isolate())));
   1814   } else {
   1815     DCHECK_EQ(3, result_size());
   1816     // Pass a pointer to the result location as the first argument.
   1817     __ lea(eax, Operand(esp, 4 * kPointerSize));
   1818     __ mov(Operand(esp, 0 * kPointerSize), eax);
   1819     __ mov(Operand(esp, 1 * kPointerSize), edi);  // argc.
   1820     __ mov(Operand(esp, 2 * kPointerSize), esi);  // argv.
   1821     __ mov(Operand(esp, 3 * kPointerSize),
   1822            Immediate(ExternalReference::isolate_address(isolate())));
   1823   }
   1824   __ call(ebx);
   1825 
   1826   if (result_size() > 2) {
   1827     DCHECK_EQ(3, result_size());
   1828 #ifndef _WIN32
   1829     // Restore the "hidden" argument on the stack which was popped by caller.
   1830     __ sub(esp, Immediate(kPointerSize));
   1831 #endif
   1832     // Read result values stored on stack. Result is stored above the arguments.
   1833     __ mov(kReturnRegister0, Operand(esp, 4 * kPointerSize));
   1834     __ mov(kReturnRegister1, Operand(esp, 5 * kPointerSize));
   1835     __ mov(kReturnRegister2, Operand(esp, 6 * kPointerSize));
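             // Slots 0-3 hold the hidden pointer plus the three arguments, so
             // the result occupies slots 4-6.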
   1836   }
   1837   // Result is in eax, edx:eax or edi:edx:eax - do not destroy these registers!
   1838 
   1839   // Check result for exception sentinel.
   1840   Label exception_returned;
   1841   __ cmp(eax, isolate()->factory()->exception());
   1842   __ j(equal, &exception_returned);
   1843 
   1844   // Check that there is no pending exception, otherwise we
   1845   // should have returned the exception sentinel.
   1846   if (FLAG_debug_code) {
   1847     __ push(edx);
   1848     __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
   1849     Label okay;
   1850     ExternalReference pending_exception_address(
   1851         Isolate::kPendingExceptionAddress, isolate());
   1852     __ cmp(edx, Operand::StaticVariable(pending_exception_address));
    1853     // Cannot use Check() here, as it attempts to generate a call into the runtime.
   1854     __ j(equal, &okay, Label::kNear);
   1855     __ int3();
   1856     __ bind(&okay);
   1857     __ pop(edx);
   1858   }
   1859 
   1860   // Exit the JavaScript to C++ exit frame.
   1861   __ LeaveExitFrame(save_doubles(), !argv_in_register());
   1862   __ ret(0);
   1863 
   1864   // Handling of exception.
   1865   __ bind(&exception_returned);
   1866 
   1867   ExternalReference pending_handler_context_address(
   1868       Isolate::kPendingHandlerContextAddress, isolate());
   1869   ExternalReference pending_handler_code_address(
   1870       Isolate::kPendingHandlerCodeAddress, isolate());
   1871   ExternalReference pending_handler_offset_address(
   1872       Isolate::kPendingHandlerOffsetAddress, isolate());
   1873   ExternalReference pending_handler_fp_address(
   1874       Isolate::kPendingHandlerFPAddress, isolate());
   1875   ExternalReference pending_handler_sp_address(
   1876       Isolate::kPendingHandlerSPAddress, isolate());
   1877 
   1878   // Ask the runtime for help to determine the handler. This will set eax to
    1879   // contain the current pending exception; don't clobber it.
   1880   ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
   1881                                  isolate());
   1882   {
   1883     FrameScope scope(masm, StackFrame::MANUAL);
   1884     __ PrepareCallCFunction(3, eax);
   1885     __ mov(Operand(esp, 0 * kPointerSize), Immediate(0));  // argc.
   1886     __ mov(Operand(esp, 1 * kPointerSize), Immediate(0));  // argv.
   1887     __ mov(Operand(esp, 2 * kPointerSize),
   1888            Immediate(ExternalReference::isolate_address(isolate())));
   1889     __ CallCFunction(find_handler, 3);
   1890   }
   1891 
   1892   // Retrieve the handler context, SP and FP.
   1893   __ mov(esi, Operand::StaticVariable(pending_handler_context_address));
   1894   __ mov(esp, Operand::StaticVariable(pending_handler_sp_address));
   1895   __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address));
   1896 
   1897   // If the handler is a JS frame, restore the context to the frame. Note that
    1898   // esi will be zero (no context) for non-JS frames.
   1899   Label skip;
   1900   __ test(esi, esi);
   1901   __ j(zero, &skip, Label::kNear);
   1902   __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
   1903   __ bind(&skip);
   1904 
   1905   // Compute the handler entry address and jump to it.
   1906   __ mov(edi, Operand::StaticVariable(pending_handler_code_address));
   1907   __ mov(edx, Operand::StaticVariable(pending_handler_offset_address));
   1908   __ lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
   1909   __ jmp(edi);
   1910 }
   1911 
   1912 
   1913 void JSEntryStub::Generate(MacroAssembler* masm) {
   1914   Label invoke, handler_entry, exit;
   1915   Label not_outermost_js, not_outermost_js_2;
   1916 
   1917   ProfileEntryHookStub::MaybeCallEntryHook(masm);
   1918 
   1919   // Set up frame.
   1920   __ push(ebp);
   1921   __ mov(ebp, esp);
   1922 
    1923   // Push the frame type marker and the current context.
   1924   int marker = type();
   1925   __ push(Immediate(Smi::FromInt(marker)));  // marker
   1926   ExternalReference context_address(Isolate::kContextAddress, isolate());
   1927   __ push(Operand::StaticVariable(context_address));  // context
   1928   // Save callee-saved registers (C calling conventions).
   1929   __ push(edi);
   1930   __ push(esi);
   1931   __ push(ebx);
   1932 
   1933   // Save copies of the top frame descriptor on the stack.
   1934   ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
   1935   __ push(Operand::StaticVariable(c_entry_fp));
   1936 
   1937   // If this is the outermost JS call, set js_entry_sp value.
   1938   ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
   1939   __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
   1940   __ j(not_equal, &not_outermost_js, Label::kNear);
   1941   __ mov(Operand::StaticVariable(js_entry_sp), ebp);
   1942   __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
   1943   __ jmp(&invoke, Label::kNear);
   1944   __ bind(&not_outermost_js);
   1945   __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
   1946 
   1947   // Jump to a faked try block that does the invoke, with a faked catch
   1948   // block that sets the pending exception.
   1949   __ jmp(&invoke);
   1950   __ bind(&handler_entry);
   1951   handler_offset_ = handler_entry.pos();
   1952   // Caught exception: Store result (exception) in the pending exception
    1953   // field of the isolate and return a failure sentinel.
   1954   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
   1955                                       isolate());
   1956   __ mov(Operand::StaticVariable(pending_exception), eax);
   1957   __ mov(eax, Immediate(isolate()->factory()->exception()));
   1958   __ jmp(&exit);
   1959 
   1960   // Invoke: Link this frame into the handler chain.
   1961   __ bind(&invoke);
   1962   __ PushStackHandler();
   1963 
   1964   // Clear any pending exceptions.
   1965   __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
   1966   __ mov(Operand::StaticVariable(pending_exception), edx);
   1967 
   1968   // Fake a receiver (NULL).
   1969   __ push(Immediate(0));  // receiver
   1970 
   1971   // Invoke the function by calling through JS entry trampoline builtin and
   1972   // pop the faked function when we return. Notice that we cannot store a
   1973   // reference to the trampoline code directly in this stub, because the
   1974   // builtin stubs may not have been generated yet.
   1975   if (type() == StackFrame::ENTRY_CONSTRUCT) {
   1976     ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
   1977                                       isolate());
   1978     __ mov(edx, Immediate(construct_entry));
   1979   } else {
   1980     ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
   1981     __ mov(edx, Immediate(entry));
   1982   }
   1983   __ mov(edx, Operand(edx, 0));  // deref address
   1984   __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
   1985   __ call(edx);
   1986 
   1987   // Unlink this frame from the handler chain.
   1988   __ PopStackHandler();
   1989 
   1990   __ bind(&exit);
   1991   // Check if the current stack frame is marked as the outermost JS frame.
   1992   __ pop(ebx);
   1993   __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
   1994   __ j(not_equal, &not_outermost_js_2);
   1995   __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
   1996   __ bind(&not_outermost_js_2);
   1997 
   1998   // Restore the top frame descriptor from the stack.
   1999   __ pop(Operand::StaticVariable(ExternalReference(
   2000       Isolate::kCEntryFPAddress, isolate())));
   2001 
   2002   // Restore callee-saved registers (C calling conventions).
   2003   __ pop(ebx);
   2004   __ pop(esi);
   2005   __ pop(edi);
   2006   __ add(esp, Immediate(2 * kPointerSize));  // remove markers
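           // The two words removed here are the frame type marker and the saved
           // context pushed during frame setup above.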
   2007 
   2008   // Restore frame pointer and return.
   2009   __ pop(ebp);
   2010   __ ret(0);
   2011 }
   2012 
   2013 
   2014 // -------------------------------------------------------------------------
   2015 // StringCharCodeAtGenerator
   2016 
   2017 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
    2018   // If the receiver is a smi, trigger the non-string case.
   2019   STATIC_ASSERT(kSmiTag == 0);
   2020   if (check_mode_ == RECEIVER_IS_UNKNOWN) {
   2021     __ JumpIfSmi(object_, receiver_not_string_);
   2022 
   2023     // Fetch the instance type of the receiver into result register.
   2024     __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
   2025     __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
    2026     // If the receiver is not a string, trigger the non-string case.
   2027     __ test(result_, Immediate(kIsNotStringMask));
   2028     __ j(not_zero, receiver_not_string_);
   2029   }
   2030 
    2031   // If the index is a non-smi, trigger the non-smi case.
   2032   STATIC_ASSERT(kSmiTag == 0);
   2033   __ JumpIfNotSmi(index_, &index_not_smi_);
   2034   __ bind(&got_smi_index_);
   2035 
   2036   // Check for index out of range.
   2037   __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
   2038   __ j(above_equal, index_out_of_range_);
   2039 
   2040   __ SmiUntag(index_);
   2041 
   2042   Factory* factory = masm->isolate()->factory();
   2043   StringCharLoadGenerator::Generate(
   2044       masm, factory, object_, index_, result_, &call_runtime_);
   2045 
   2046   __ SmiTag(result_);
   2047   __ bind(&exit_);
   2048 }
   2049 
   2050 
   2051 void StringCharCodeAtGenerator::GenerateSlow(
   2052     MacroAssembler* masm, EmbedMode embed_mode,
   2053     const RuntimeCallHelper& call_helper) {
   2054   __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
   2055 
   2056   // Index is not a smi.
   2057   __ bind(&index_not_smi_);
   2058   // If index is a heap number, try converting it to an integer.
   2059   __ CheckMap(index_,
   2060               masm->isolate()->factory()->heap_number_map(),
   2061               index_not_number_,
   2062               DONT_DO_SMI_CHECK);
   2063   call_helper.BeforeCall(masm);
   2064   if (embed_mode == PART_OF_IC_HANDLER) {
   2065     __ push(LoadWithVectorDescriptor::VectorRegister());
   2066     __ push(LoadDescriptor::SlotRegister());
   2067   }
   2068   __ push(object_);
   2069   __ push(index_);  // Consumed by runtime conversion function.
   2070   __ CallRuntime(Runtime::kNumberToSmi);
   2071   if (!index_.is(eax)) {
   2072     // Save the conversion result before the pop instructions below
   2073     // have a chance to overwrite it.
   2074     __ mov(index_, eax);
   2075   }
   2076   __ pop(object_);
   2077   if (embed_mode == PART_OF_IC_HANDLER) {
   2078     __ pop(LoadDescriptor::SlotRegister());
   2079     __ pop(LoadWithVectorDescriptor::VectorRegister());
   2080   }
   2081   // Reload the instance type.
   2082   __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
   2083   __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
   2084   call_helper.AfterCall(masm);
   2085   // If index is still not a smi, it must be out of range.
   2086   STATIC_ASSERT(kSmiTag == 0);
   2087   __ JumpIfNotSmi(index_, index_out_of_range_);
   2088   // Otherwise, return to the fast path.
   2089   __ jmp(&got_smi_index_);
   2090 
   2091   // Call runtime. We get here when the receiver is a string and the
    2092   // index is a number, but the code for getting the actual character
   2093   // is too complex (e.g., when the string needs to be flattened).
   2094   __ bind(&call_runtime_);
   2095   call_helper.BeforeCall(masm);
   2096   __ push(object_);
   2097   __ SmiTag(index_);
   2098   __ push(index_);
   2099   __ CallRuntime(Runtime::kStringCharCodeAtRT);
   2100   if (!result_.is(eax)) {
   2101     __ mov(result_, eax);
   2102   }
   2103   call_helper.AfterCall(masm);
   2104   __ jmp(&exit_);
   2105 
   2106   __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
   2107 }
   2108 
   2109 
   2110 // -------------------------------------------------------------------------
   2111 // StringCharFromCodeGenerator
   2112 
   2113 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
   2114   // Fast case of Heap::LookupSingleCharacterStringFromCode.
   2115   STATIC_ASSERT(kSmiTag == 0);
   2116   STATIC_ASSERT(kSmiShiftSize == 0);
   2117   DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCodeU + 1));
   2118   __ test(code_, Immediate(kSmiTagMask |
   2119                            ((~String::kMaxOneByteCharCodeU) << kSmiTagSize)));
   2120   __ j(not_zero, &slow_case_);
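           // A single test rejects both non-smis (tag bit) and char codes above
           // kMaxOneByteCharCode; the combined mask works because
           // kMaxOneByteCharCodeU + 1 is a power of two (see the DCHECK above).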
   2121 
   2122   Factory* factory = masm->isolate()->factory();
   2123   __ Move(result_, Immediate(factory->single_character_string_cache()));
   2124   STATIC_ASSERT(kSmiTag == 0);
   2125   STATIC_ASSERT(kSmiTagSize == 1);
   2126   STATIC_ASSERT(kSmiShiftSize == 0);
    2127   // At this point the code register contains a smi-tagged one-byte char code.
   2128   __ mov(result_, FieldOperand(result_,
   2129                                code_, times_half_pointer_size,
   2130                                FixedArray::kHeaderSize));
   2131   __ cmp(result_, factory->undefined_value());
   2132   __ j(equal, &slow_case_);
   2133   __ bind(&exit_);
   2134 }
   2135 
   2136 
   2137 void StringCharFromCodeGenerator::GenerateSlow(
   2138     MacroAssembler* masm,
   2139     const RuntimeCallHelper& call_helper) {
   2140   __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
   2141 
   2142   __ bind(&slow_case_);
   2143   call_helper.BeforeCall(masm);
   2144   __ push(code_);
   2145   __ CallRuntime(Runtime::kStringCharFromCode);
   2146   if (!result_.is(eax)) {
   2147     __ mov(result_, eax);
   2148   }
   2149   call_helper.AfterCall(masm);
   2150   __ jmp(&exit_);
   2151 
   2152   __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
   2153 }
   2154 
   2155 
   2156 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
   2157                                           Register dest,
   2158                                           Register src,
   2159                                           Register count,
   2160                                           Register scratch,
   2161                                           String::Encoding encoding) {
   2162   DCHECK(!scratch.is(dest));
   2163   DCHECK(!scratch.is(src));
   2164   DCHECK(!scratch.is(count));
   2165 
   2166   // Nothing to do for zero characters.
   2167   Label done;
   2168   __ test(count, count);
   2169   __ j(zero, &done);
   2170 
   2171   // Make count the number of bytes to copy.
   2172   if (encoding == String::TWO_BYTE_ENCODING) {
   2173     __ shl(count, 1);
   2174   }
   2175 
   2176   Label loop;
   2177   __ bind(&loop);
   2178   __ mov_b(scratch, Operand(src, 0));
   2179   __ mov_b(Operand(dest, 0), scratch);
   2180   __ inc(src);
   2181   __ inc(dest);
   2182   __ dec(count);
   2183   __ j(not_zero, &loop);
   2184 
   2185   __ bind(&done);
   2186 }
   2187 
   2188 
   2189 void SubStringStub::Generate(MacroAssembler* masm) {
   2190   Label runtime;
   2191 
   2192   // Stack frame on entry.
   2193   //  esp[0]: return address
   2194   //  esp[4]: to
   2195   //  esp[8]: from
   2196   //  esp[12]: string
   2197 
   2198   // Make sure first argument is a string.
   2199   __ mov(eax, Operand(esp, 3 * kPointerSize));
   2200   STATIC_ASSERT(kSmiTag == 0);
   2201   __ JumpIfSmi(eax, &runtime);
   2202   Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
   2203   __ j(NegateCondition(is_string), &runtime);
   2204 
   2205   // eax: string
   2206   // ebx: instance type
   2207 
   2208   // Calculate length of sub string using the smi values.
   2209   __ mov(ecx, Operand(esp, 1 * kPointerSize));  // To index.
   2210   __ JumpIfNotSmi(ecx, &runtime);
   2211   __ mov(edx, Operand(esp, 2 * kPointerSize));  // From index.
   2212   __ JumpIfNotSmi(edx, &runtime);
   2213   __ sub(ecx, edx);
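           // Both indices are smis, so the difference is the smi-tagged
           // substring length.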
   2214   __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
   2215   Label not_original_string;
   2216   // Shorter than original string's length: an actual substring.
   2217   __ j(below, &not_original_string, Label::kNear);
   2218   // Longer than original string's length or negative: unsafe arguments.
   2219   __ j(above, &runtime);
   2220   // Return original string.
   2221   Counters* counters = isolate()->counters();
   2222   __ IncrementCounter(counters->sub_string_native(), 1);
   2223   __ ret(3 * kPointerSize);
   2224   __ bind(&not_original_string);
   2225 
   2226   Label single_char;
   2227   __ cmp(ecx, Immediate(Smi::FromInt(1)));
   2228   __ j(equal, &single_char);
   2229 
   2230   // eax: string
   2231   // ebx: instance type
   2232   // ecx: sub string length (smi)
   2233   // edx: from index (smi)
   2234   // Deal with different string types: update the index if necessary
   2235   // and put the underlying string into edi.
   2236   Label underlying_unpacked, sliced_string, seq_or_external_string;
   2237   // If the string is not indirect, it can only be sequential or external.
   2238   STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
   2239   STATIC_ASSERT(kIsIndirectStringMask != 0);
   2240   __ test(ebx, Immediate(kIsIndirectStringMask));
   2241   __ j(zero, &seq_or_external_string, Label::kNear);
   2242 
   2243   Factory* factory = isolate()->factory();
   2244   __ test(ebx, Immediate(kSlicedNotConsMask));
   2245   __ j(not_zero, &sliced_string, Label::kNear);
   2246   // Cons string.  Check whether it is flat, then fetch first part.
   2247   // Flat cons strings have an empty second part.
   2248   __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
   2249          factory->empty_string());
   2250   __ j(not_equal, &runtime);
   2251   __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
   2252   // Update instance type.
   2253   __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
   2254   __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
   2255   __ jmp(&underlying_unpacked, Label::kNear);
   2256 
   2257   __ bind(&sliced_string);
   2258   // Sliced string.  Fetch parent and adjust start index by offset.
   2259   __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
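           // The from index and the stored offset are both smis, so plain
           // addition yields the adjusted smi index.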
   2260   __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
   2261   // Update instance type.
   2262   __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
   2263   __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
   2264   __ jmp(&underlying_unpacked, Label::kNear);
   2265 
   2266   __ bind(&seq_or_external_string);
   2267   // Sequential or external string.  Just move string to the expected register.
   2268   __ mov(edi, eax);
   2269 
   2270   __ bind(&underlying_unpacked);
   2271 
   2272   if (FLAG_string_slices) {
   2273     Label copy_routine;
   2274     // edi: underlying subject string
   2275     // ebx: instance type of underlying subject string
   2276     // edx: adjusted start index (smi)
   2277     // ecx: length (smi)
   2278     __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
   2279     // Short slice.  Copy instead of slicing.
   2280     __ j(less, &copy_routine);
   2281     // Allocate new sliced string.  At this point we do not reload the instance
   2282     // type including the string encoding because we simply rely on the info
   2283     // provided by the original string.  It does not matter if the original
    2284     // string's encoding is wrong because we always have to recheck the encoding
    2285     // of the newly created string's parent anyway due to externalized strings.
   2286     Label two_byte_slice, set_slice_header;
   2287     STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
   2288     STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
   2289     __ test(ebx, Immediate(kStringEncodingMask));
   2290     __ j(zero, &two_byte_slice, Label::kNear);
   2291     __ AllocateOneByteSlicedString(eax, ebx, no_reg, &runtime);
   2292     __ jmp(&set_slice_header, Label::kNear);
   2293     __ bind(&two_byte_slice);
   2294     __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
   2295     __ bind(&set_slice_header);
   2296     __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
   2297     __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
   2298            Immediate(String::kEmptyHashField));
   2299     __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
   2300     __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
   2301     __ IncrementCounter(counters->sub_string_native(), 1);
   2302     __ ret(3 * kPointerSize);
   2303 
   2304     __ bind(&copy_routine);
   2305   }
   2306 
   2307   // edi: underlying subject string
   2308   // ebx: instance type of underlying subject string
   2309   // edx: adjusted start index (smi)
   2310   // ecx: length (smi)
    2311   // The subject string can only be an external or a sequential string of
    2312   // either encoding at this point.
   2313   Label two_byte_sequential, runtime_drop_two, sequential_string;
   2314   STATIC_ASSERT(kExternalStringTag != 0);
   2315   STATIC_ASSERT(kSeqStringTag == 0);
   2316   __ test_b(ebx, Immediate(kExternalStringTag));
   2317   __ j(zero, &sequential_string);
   2318 
   2319   // Handle external string.
   2320   // Rule out short external strings.
   2321   STATIC_ASSERT(kShortExternalStringTag != 0);
   2322   __ test_b(ebx, Immediate(kShortExternalStringMask));
   2323   __ j(not_zero, &runtime);
   2324   __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
   2325   // Move the pointer so that offset-wise, it looks like a sequential string.
   2326   STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
   2327   __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
   2328 
   2329   __ bind(&sequential_string);
   2330   // Stash away (adjusted) index and (underlying) string.
   2331   __ push(edx);
   2332   __ push(edi);
   2333   __ SmiUntag(ecx);
   2334   STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
   2335   __ test_b(ebx, Immediate(kStringEncodingMask));
   2336   __ j(zero, &two_byte_sequential);
   2337 
   2338   // Sequential one byte string.  Allocate the result.
   2339   __ AllocateOneByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
   2340 
   2341   // eax: result string
   2342   // ecx: result string length
   2343   // Locate first character of result.
   2344   __ mov(edi, eax);
   2345   __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
   2346   // Load string argument and locate character of sub string start.
   2347   __ pop(edx);
   2348   __ pop(ebx);
   2349   __ SmiUntag(ebx);
   2350   __ lea(edx, FieldOperand(edx, ebx, times_1, SeqOneByteString::kHeaderSize));
   2351 
   2352   // eax: result string
   2353   // ecx: result length
   2354   // edi: first character of result
   2355   // edx: character of sub string start
   2356   StringHelper::GenerateCopyCharacters(
   2357       masm, edi, edx, ecx, ebx, String::ONE_BYTE_ENCODING);
   2358   __ IncrementCounter(counters->sub_string_native(), 1);
   2359   __ ret(3 * kPointerSize);
   2360 
   2361   __ bind(&two_byte_sequential);
   2362   // Sequential two-byte string.  Allocate the result.
   2363   __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
   2364 
   2365   // eax: result string
   2366   // ecx: result string length
   2367   // Locate first character of result.
   2368   __ mov(edi, eax);
   2369   __ add(edi,
   2370          Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
   2371   // Load string argument and locate character of sub string start.
   2372   __ pop(edx);
   2373   __ pop(ebx);
    2374   // As from is a smi, it is already twice the index, which matches the size
    2375   // of a two-byte character.
   2376   STATIC_ASSERT(kSmiTag == 0);
   2377   STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
   2378   __ lea(edx, FieldOperand(edx, ebx, times_1, SeqTwoByteString::kHeaderSize));
   2379 
   2380   // eax: result string
   2381   // ecx: result length
   2382   // edi: first character of result
   2383   // edx: character of sub string start
   2384   StringHelper::GenerateCopyCharacters(
   2385       masm, edi, edx, ecx, ebx, String::TWO_BYTE_ENCODING);
   2386   __ IncrementCounter(counters->sub_string_native(), 1);
   2387   __ ret(3 * kPointerSize);
   2388 
   2389   // Drop pushed values on the stack before tail call.
   2390   __ bind(&runtime_drop_two);
   2391   __ Drop(2);
   2392 
   2393   // Just jump to runtime to create the sub string.
   2394   __ bind(&runtime);
   2395   __ TailCallRuntime(Runtime::kSubString);
   2396 
   2397   __ bind(&single_char);
   2398   // eax: string
   2399   // ebx: instance type
   2400   // ecx: sub string length (smi)
   2401   // edx: from index (smi)
   2402   StringCharAtGenerator generator(eax, edx, ecx, eax, &runtime, &runtime,
   2403                                   &runtime, RECEIVER_IS_STRING);
   2404   generator.GenerateFast(masm);
   2405   __ ret(3 * kPointerSize);
   2406   generator.SkipSlow(masm, &runtime);
   2407 }
   2408 
   2409 void ToStringStub::Generate(MacroAssembler* masm) {
   2410   // The ToString stub takes one argument in eax.
   2411   Label is_number;
   2412   __ JumpIfSmi(eax, &is_number, Label::kNear);
   2413 
   2414   Label not_string;
   2415   __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
   2416   // eax: receiver
   2417   // edi: receiver map
   2418   __ j(above_equal, &not_string, Label::kNear);
   2419   __ Ret();
   2420   __ bind(&not_string);
   2421 
   2422   Label not_heap_number;
   2423   __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
   2424   __ j(not_equal, &not_heap_number, Label::kNear);
   2425   __ bind(&is_number);
   2426   NumberToStringStub stub(isolate());
   2427   __ TailCallStub(&stub);
   2428   __ bind(&not_heap_number);
   2429 
   2430   Label not_oddball;
   2431   __ CmpInstanceType(edi, ODDBALL_TYPE);
   2432   __ j(not_equal, &not_oddball, Label::kNear);
   2433   __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
   2434   __ Ret();
   2435   __ bind(&not_oddball);
   2436 
   2437   __ pop(ecx);   // Pop return address.
   2438   __ push(eax);  // Push argument.
   2439   __ push(ecx);  // Push return address.
   2440   __ TailCallRuntime(Runtime::kToString);
   2441 }
   2442 
   2443 
   2444 void ToNameStub::Generate(MacroAssembler* masm) {
   2445   // The ToName stub takes one argument in eax.
   2446   Label is_number;
   2447   __ JumpIfSmi(eax, &is_number, Label::kNear);
   2448 
   2449   Label not_name;
   2450   STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
   2451   __ CmpObjectType(eax, LAST_NAME_TYPE, edi);
   2452   // eax: receiver
   2453   // edi: receiver map
   2454   __ j(above, &not_name, Label::kNear);
   2455   __ Ret();
   2456   __ bind(&not_name);
   2457 
   2458   Label not_heap_number;
   2459   __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
   2460   __ j(not_equal, &not_heap_number, Label::kNear);
   2461   __ bind(&is_number);
   2462   NumberToStringStub stub(isolate());
   2463   __ TailCallStub(&stub);
   2464   __ bind(&not_heap_number);
   2465 
   2466   Label not_oddball;
   2467   __ CmpInstanceType(edi, ODDBALL_TYPE);
   2468   __ j(not_equal, &not_oddball, Label::kNear);
   2469   __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
   2470   __ Ret();
   2471   __ bind(&not_oddball);
   2472 
   2473   __ pop(ecx);   // Pop return address.
   2474   __ push(eax);  // Push argument.
   2475   __ push(ecx);  // Push return address.
   2476   __ TailCallRuntime(Runtime::kToName);
   2477 }
   2478 
   2479 
   2480 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
   2481                                                    Register left,
   2482                                                    Register right,
   2483                                                    Register scratch1,
   2484                                                    Register scratch2) {
   2485   Register length = scratch1;
   2486 
   2487   // Compare lengths.
   2488   Label strings_not_equal, check_zero_length;
   2489   __ mov(length, FieldOperand(left, String::kLengthOffset));
   2490   __ cmp(length, FieldOperand(right, String::kLengthOffset));
   2491   __ j(equal, &check_zero_length, Label::kNear);
   2492   __ bind(&strings_not_equal);
   2493   __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
   2494   __ ret(0);
   2495 
   2496   // Check if the length is zero.
   2497   Label compare_chars;
   2498   __ bind(&check_zero_length);
   2499   STATIC_ASSERT(kSmiTag == 0);
   2500   __ test(length, length);
   2501   __ j(not_zero, &compare_chars, Label::kNear);
   2502   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   2503   __ ret(0);
   2504 
   2505   // Compare characters.
   2506   __ bind(&compare_chars);
   2507   GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
   2508                                   &strings_not_equal, Label::kNear);
   2509 
   2510   // Characters are equal.
   2511   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   2512   __ ret(0);
   2513 }
   2514 
   2515 
   2516 void StringHelper::GenerateCompareFlatOneByteStrings(
   2517     MacroAssembler* masm, Register left, Register right, Register scratch1,
   2518     Register scratch2, Register scratch3) {
   2519   Counters* counters = masm->isolate()->counters();
   2520   __ IncrementCounter(counters->string_compare_native(), 1);
   2521 
   2522   // Find minimum length.
   2523   Label left_shorter;
   2524   __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
   2525   __ mov(scratch3, scratch1);
   2526   __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
   2527 
   2528   Register length_delta = scratch3;
   2529 
   2530   __ j(less_equal, &left_shorter, Label::kNear);
   2531   // Right string is shorter. Change scratch1 to be length of right string.
   2532   __ sub(scratch1, length_delta);
   2533   __ bind(&left_shorter);
   2534 
   2535   Register min_length = scratch1;
   2536 
   2537   // If either length is zero, just compare lengths.
   2538   Label compare_lengths;
   2539   __ test(min_length, min_length);
   2540   __ j(zero, &compare_lengths, Label::kNear);
   2541 
   2542   // Compare characters.
   2543   Label result_not_equal;
   2544   GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
   2545                                   &result_not_equal, Label::kNear);
   2546 
    2547   // Compare lengths - the strings are equal up to min_length.
   2548   __ bind(&compare_lengths);
   2549   __ test(length_delta, length_delta);
   2550   Label length_not_equal;
   2551   __ j(not_zero, &length_not_equal, Label::kNear);
   2552 
   2553   // Result is EQUAL.
   2554   STATIC_ASSERT(EQUAL == 0);
   2555   STATIC_ASSERT(kSmiTag == 0);
   2556   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   2557   __ ret(0);
   2558 
   2559   Label result_greater;
   2560   Label result_less;
   2561   __ bind(&length_not_equal);
   2562   __ j(greater, &result_greater, Label::kNear);
   2563   __ jmp(&result_less, Label::kNear);
   2564   __ bind(&result_not_equal);
   2565   __ j(above, &result_greater, Label::kNear);
   2566   __ bind(&result_less);
   2567 
   2568   // Result is LESS.
   2569   __ Move(eax, Immediate(Smi::FromInt(LESS)));
   2570   __ ret(0);
   2571 
   2572   // Result is GREATER.
   2573   __ bind(&result_greater);
   2574   __ Move(eax, Immediate(Smi::FromInt(GREATER)));
   2575   __ ret(0);
   2576 }
   2577 
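         // The ordering logic, in outline: compare the first
         // min(len_left, len_right) characters; if they all match, the shorter
         // string sorts first, so the sign of length_delta (len_left - len_right)
         // decides. If a character differs, the unsigned byte comparison (the
         // 'above' branch) decides GREATER vs. LESS.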
   2578 
   2579 void StringHelper::GenerateOneByteCharsCompareLoop(
   2580     MacroAssembler* masm, Register left, Register right, Register length,
   2581     Register scratch, Label* chars_not_equal,
   2582     Label::Distance chars_not_equal_near) {
    2583   // Change index to run from -length to -1 by adding length to the string
    2584   // start. This means the loop ends when the index reaches zero, which
    2585   // needs no additional compare.
   2586   __ SmiUntag(length);
   2587   __ lea(left,
   2588          FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
   2589   __ lea(right,
   2590          FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
   2591   __ neg(length);
   2592   Register index = length;  // index = -length;
   2593 
   2594   // Compare loop.
   2595   Label loop;
   2596   __ bind(&loop);
   2597   __ mov_b(scratch, Operand(left, index, times_1, 0));
   2598   __ cmpb(scratch, Operand(right, index, times_1, 0));
   2599   __ j(not_equal, chars_not_equal, chars_not_equal_near);
   2600   __ inc(index);
   2601   __ j(not_zero, &loop);
   2602 }
   2603 
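         // The negative-index loop above is, in effect (lend/rend are
         // hypothetical pointers just past each string's character payload):
         //   for (int i = -length; i != 0; i++) {
         //     if (lend[i] != rend[i]) goto chars_not_equal;
         //   }
         // inc(index) sets the zero flag when the index reaches 0, so the loop
         // needs no separate compare instruction.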
   2604 
   2605 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
   2606   // ----------- S t a t e -------------
   2607   //  -- edx    : left
   2608   //  -- eax    : right
   2609   //  -- esp[0] : return address
   2610   // -----------------------------------
   2611 
   2612   // Load ecx with the allocation site.  We stick an undefined dummy value here
   2613   // and replace it with the real allocation site later when we instantiate this
   2614   // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
   2615   __ mov(ecx, isolate()->factory()->undefined_value());
   2616 
   2617   // Make sure that we actually patched the allocation site.
   2618   if (FLAG_debug_code) {
   2619     __ test(ecx, Immediate(kSmiTagMask));
   2620     __ Assert(not_equal, kExpectedAllocationSite);
   2621     __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
   2622            isolate()->factory()->allocation_site_map());
   2623     __ Assert(equal, kExpectedAllocationSite);
   2624   }
   2625 
   2626   // Tail call into the stub that handles binary operations with allocation
   2627   // sites.
   2628   BinaryOpWithAllocationSiteStub stub(isolate(), state());
   2629   __ TailCallStub(&stub);
   2630 }
   2631 
   2632 
   2633 void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
   2634   DCHECK_EQ(CompareICState::BOOLEAN, state());
   2635   Label miss;
   2636   Label::Distance const miss_distance =
   2637       masm->emit_debug_code() ? Label::kFar : Label::kNear;
   2638 
   2639   __ JumpIfSmi(edx, &miss, miss_distance);
   2640   __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
   2641   __ JumpIfSmi(eax, &miss, miss_distance);
   2642   __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
   2643   __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
   2644   __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
   2645   if (!Token::IsEqualityOp(op())) {
   2646     __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
   2647     __ AssertSmi(eax);
   2648     __ mov(edx, FieldOperand(edx, Oddball::kToNumberOffset));
   2649     __ AssertSmi(edx);
   2650     __ push(eax);
   2651     __ mov(eax, edx);
   2652     __ pop(edx);
   2653   }
   2654   __ sub(eax, edx);
   2655   __ Ret();
   2656 
   2657   __ bind(&miss);
   2658   GenerateMiss(masm);
   2659 }
   2660 
   2661 
   2662 void CompareICStub::GenerateSmis(MacroAssembler* masm) {
   2663   DCHECK(state() == CompareICState::SMI);
   2664   Label miss;
   2665   __ mov(ecx, edx);
   2666   __ or_(ecx, eax);
   2667   __ JumpIfNotSmi(ecx, &miss, Label::kNear);
   2668 
   2669   if (GetCondition() == equal) {
   2670     // For equality we do not care about the sign of the result.
   2671     __ sub(eax, edx);
   2672   } else {
   2673     Label done;
   2674     __ sub(edx, eax);
   2675     __ j(no_overflow, &done, Label::kNear);
    2676     // Correct the sign of the result in case of overflow: not(edx)
             // computes -edx - 1, which has the right sign, and callers only
             // use the sign of the result.
   2677     __ not_(edx);
   2678     __ bind(&done);
   2679     __ mov(eax, edx);
   2680   }
   2681   __ ret(0);
   2682 
   2683   __ bind(&miss);
   2684   GenerateMiss(masm);
   2685 }
   2686 
   2687 
   2688 void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
   2689   DCHECK(state() == CompareICState::NUMBER);
   2690 
   2691   Label generic_stub;
   2692   Label unordered, maybe_undefined1, maybe_undefined2;
   2693   Label miss;
   2694 
   2695   if (left() == CompareICState::SMI) {
   2696     __ JumpIfNotSmi(edx, &miss);
   2697   }
   2698   if (right() == CompareICState::SMI) {
   2699     __ JumpIfNotSmi(eax, &miss);
   2700   }
   2701 
    2702   // Load left and right operands.
   2703   Label done, left, left_smi, right_smi;
   2704   __ JumpIfSmi(eax, &right_smi, Label::kNear);
   2705   __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
   2706          isolate()->factory()->heap_number_map());
   2707   __ j(not_equal, &maybe_undefined1, Label::kNear);
   2708   __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
   2709   __ jmp(&left, Label::kNear);
   2710   __ bind(&right_smi);
   2711   __ mov(ecx, eax);  // Can't clobber eax because we can still jump away.
   2712   __ SmiUntag(ecx);
   2713   __ Cvtsi2sd(xmm1, ecx);
   2714 
   2715   __ bind(&left);
   2716   __ JumpIfSmi(edx, &left_smi, Label::kNear);
   2717   __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
   2718          isolate()->factory()->heap_number_map());
   2719   __ j(not_equal, &maybe_undefined2, Label::kNear);
   2720   __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
   2721   __ jmp(&done);
   2722   __ bind(&left_smi);
   2723   __ mov(ecx, edx);  // Can't clobber edx because we can still jump away.
   2724   __ SmiUntag(ecx);
   2725   __ Cvtsi2sd(xmm0, ecx);
   2726 
   2727   __ bind(&done);
   2728   // Compare operands.
   2729   __ ucomisd(xmm0, xmm1);
   2730 
   2731   // Don't base result on EFLAGS when a NaN is involved.
   2732   __ j(parity_even, &unordered, Label::kNear);
   2733 
   2734   // Return a result of -1, 0, or 1, based on EFLAGS.
    2735   // Use mov rather than xor to clear eax: xor would clobber EFLAGS, which
           // still holds the ucomisd result.
   2736   __ mov(eax, 0);  // equal
   2737   __ mov(ecx, Immediate(Smi::FromInt(1)));
   2738   __ cmov(above, eax, ecx);
   2739   __ mov(ecx, Immediate(Smi::FromInt(-1)));
   2740   __ cmov(below, eax, ecx);
   2741   __ ret(0);
   2742 
   2743   __ bind(&unordered);
   2744   __ bind(&generic_stub);
   2745   CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
   2746                      CompareICState::GENERIC, CompareICState::GENERIC);
   2747   __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
   2748 
   2749   __ bind(&maybe_undefined1);
   2750   if (Token::IsOrderedRelationalCompareOp(op())) {
   2751     __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
   2752     __ j(not_equal, &miss);
   2753     __ JumpIfSmi(edx, &unordered);
   2754     __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
   2755     __ j(not_equal, &maybe_undefined2, Label::kNear);
   2756     __ jmp(&unordered);
   2757   }
   2758 
   2759   __ bind(&maybe_undefined2);
   2760   if (Token::IsOrderedRelationalCompareOp(op())) {
   2761     __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
   2762     __ j(equal, &unordered);
   2763   }
   2764 
   2765   __ bind(&miss);
   2766   GenerateMiss(masm);
   2767 }
   2768 
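         // The double comparison above amounts to, with x = left (edx) and
         // y = right (eax) as doubles:
         //   if (isnan(x) || isnan(y)) goto unordered;  // PF from ucomisd
         //   return Smi(x > y ? 1 : (x < y ? -1 : 0));  // via the two cmovs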
   2769 
   2770 void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
   2771   DCHECK(state() == CompareICState::INTERNALIZED_STRING);
   2772   DCHECK(GetCondition() == equal);
   2773 
   2774   // Registers containing left and right operands respectively.
   2775   Register left = edx;
   2776   Register right = eax;
   2777   Register tmp1 = ecx;
   2778   Register tmp2 = ebx;
   2779 
   2780   // Check that both operands are heap objects.
   2781   Label miss;
   2782   __ mov(tmp1, left);
   2783   STATIC_ASSERT(kSmiTag == 0);
   2784   __ and_(tmp1, right);
   2785   __ JumpIfSmi(tmp1, &miss, Label::kNear);
   2786 
   2787   // Check that both operands are internalized strings.
   2788   __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
   2789   __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
   2790   __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   2791   __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   2792   STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
   2793   __ or_(tmp1, tmp2);
   2794   __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
   2795   __ j(not_zero, &miss, Label::kNear);
   2796 
   2797   // Internalized strings are compared by identity.
   2798   Label done;
   2799   __ cmp(left, right);
    2800   // When not equal, eax (the right operand) already provides a non-zero
    2801   // result: at this point both operands are heap object pointers, which
           // are never zero.
   2802   DCHECK(right.is(eax));
   2803   __ j(not_equal, &done, Label::kNear);
   2804   STATIC_ASSERT(EQUAL == 0);
   2805   STATIC_ASSERT(kSmiTag == 0);
   2806   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   2807   __ bind(&done);
   2808   __ ret(0);
   2809 
   2810   __ bind(&miss);
   2811   GenerateMiss(masm);
   2812 }
   2813 
   2814 
   2815 void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
   2816   DCHECK(state() == CompareICState::UNIQUE_NAME);
   2817   DCHECK(GetCondition() == equal);
   2818 
   2819   // Registers containing left and right operands respectively.
   2820   Register left = edx;
   2821   Register right = eax;
   2822   Register tmp1 = ecx;
   2823   Register tmp2 = ebx;
   2824 
   2825   // Check that both operands are heap objects.
   2826   Label miss;
   2827   __ mov(tmp1, left);
   2828   STATIC_ASSERT(kSmiTag == 0);
   2829   __ and_(tmp1, right);
   2830   __ JumpIfSmi(tmp1, &miss, Label::kNear);
   2831 
   2832   // Check that both operands are unique names. This leaves the instance
   2833   // types loaded in tmp1 and tmp2.
   2834   __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
   2835   __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
   2836   __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   2837   __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   2838 
   2839   __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
   2840   __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
   2841 
   2842   // Unique names are compared by identity.
   2843   Label done;
   2844   __ cmp(left, right);
    2845   // When not equal, eax (the right operand) already provides a non-zero
    2846   // result: at this point both operands are heap object pointers, which
           // are never zero.
   2847   DCHECK(right.is(eax));
   2848   __ j(not_equal, &done, Label::kNear);
   2849   STATIC_ASSERT(EQUAL == 0);
   2850   STATIC_ASSERT(kSmiTag == 0);
   2851   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   2852   __ bind(&done);
   2853   __ ret(0);
   2854 
   2855   __ bind(&miss);
   2856   GenerateMiss(masm);
   2857 }
   2858 
   2859 
   2860 void CompareICStub::GenerateStrings(MacroAssembler* masm) {
   2861   DCHECK(state() == CompareICState::STRING);
   2862   Label miss;
   2863 
   2864   bool equality = Token::IsEqualityOp(op());
   2865 
   2866   // Registers containing left and right operands respectively.
   2867   Register left = edx;
   2868   Register right = eax;
   2869   Register tmp1 = ecx;
   2870   Register tmp2 = ebx;
   2871   Register tmp3 = edi;
   2872 
   2873   // Check that both operands are heap objects.
   2874   __ mov(tmp1, left);
   2875   STATIC_ASSERT(kSmiTag == 0);
   2876   __ and_(tmp1, right);
   2877   __ JumpIfSmi(tmp1, &miss);
   2878 
   2879   // Check that both operands are strings. This leaves the instance
   2880   // types loaded in tmp1 and tmp2.
   2881   __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
   2882   __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
   2883   __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   2884   __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   2885   __ mov(tmp3, tmp1);
   2886   STATIC_ASSERT(kNotStringTag != 0);
   2887   __ or_(tmp3, tmp2);
   2888   __ test(tmp3, Immediate(kIsNotStringMask));
   2889   __ j(not_zero, &miss);
   2890 
   2891   // Fast check for identical strings.
   2892   Label not_same;
   2893   __ cmp(left, right);
   2894   __ j(not_equal, &not_same, Label::kNear);
   2895   STATIC_ASSERT(EQUAL == 0);
   2896   STATIC_ASSERT(kSmiTag == 0);
   2897   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
   2898   __ ret(0);
   2899 
    2900   // Handle non-identical strings.
   2901   __ bind(&not_same);
   2902 
    2903   // Check that both strings are internalized. If they are, we're done
    2904   // because we already know they are not identical. But for a
    2905   // non-equality compare, we still need to determine the order. We also
    2906   // know they are both strings.
   2907   if (equality) {
   2908     Label do_compare;
   2909     STATIC_ASSERT(kInternalizedTag == 0);
   2910     __ or_(tmp1, tmp2);
   2911     __ test(tmp1, Immediate(kIsNotInternalizedMask));
   2912     __ j(not_zero, &do_compare, Label::kNear);
    2913     // When not equal, eax (the right operand) already provides a non-zero
    2914     // result: both operands are heap object pointers here, which are
             // never zero.
   2915     DCHECK(right.is(eax));
   2916     __ ret(0);
   2917     __ bind(&do_compare);
   2918   }
   2919 
   2920   // Check that both strings are sequential one-byte.
   2921   Label runtime;
   2922   __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);
   2923 
    2924   // Compare flat one-byte strings. Returns when done.
   2925   if (equality) {
   2926     StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
   2927                                                   tmp2);
   2928   } else {
   2929     StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
   2930                                                     tmp2, tmp3);
   2931   }
   2932 
   2933   // Handle more complex cases in runtime.
   2934   __ bind(&runtime);
   2935   if (equality) {
   2936     {
   2937       FrameScope scope(masm, StackFrame::INTERNAL);
   2938       __ Push(left);
   2939       __ Push(right);
   2940       __ CallRuntime(Runtime::kStringEqual);
   2941     }
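             // kStringEqual returns true_value or false_value in eax. Subtracting
             // true_value maps true -> 0 and false -> non-zero, matching the
             // equality IC convention that zero means equal.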
   2942     __ sub(eax, Immediate(masm->isolate()->factory()->true_value()));
   2943     __ Ret();
   2944   } else {
   2945     __ pop(tmp1);  // Return address.
   2946     __ push(left);
   2947     __ push(right);
   2948     __ push(tmp1);
   2949     __ TailCallRuntime(Runtime::kStringCompare);
   2950   }
   2951 
   2952   __ bind(&miss);
   2953   GenerateMiss(masm);
   2954 }
   2955 
   2956 
   2957 void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
   2958   DCHECK_EQ(CompareICState::RECEIVER, state());
   2959   Label miss;
   2960   __ mov(ecx, edx);
   2961   __ and_(ecx, eax);
   2962   __ JumpIfSmi(ecx, &miss, Label::kNear);
   2963 
   2964   STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
   2965   __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
   2966   __ j(below, &miss, Label::kNear);
   2967   __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
   2968   __ j(below, &miss, Label::kNear);
   2969 
   2970   DCHECK_EQ(equal, GetCondition());
   2971   __ sub(eax, edx);
   2972   __ ret(0);
   2973 
   2974   __ bind(&miss);
   2975   GenerateMiss(masm);
   2976 }
   2977 
   2978 
   2979 void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
   2980   Label miss;
   2981   Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
   2982   __ mov(ecx, edx);
   2983   __ and_(ecx, eax);
   2984   __ JumpIfSmi(ecx, &miss, Label::kNear);
   2985 
   2986   __ GetWeakValue(edi, cell);
   2987   __ cmp(edi, FieldOperand(eax, HeapObject::kMapOffset));
   2988   __ j(not_equal, &miss, Label::kNear);
   2989   __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset));
   2990   __ j(not_equal, &miss, Label::kNear);
   2991 
   2992   if (Token::IsEqualityOp(op())) {
   2993     __ sub(eax, edx);
   2994     __ ret(0);
   2995   } else {
   2996     __ PopReturnAddressTo(ecx);
   2997     __ Push(edx);
   2998     __ Push(eax);
   2999     __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition()))));
   3000     __ PushReturnAddressFrom(ecx);
   3001     __ TailCallRuntime(Runtime::kCompare);
   3002   }
   3003 
   3004   __ bind(&miss);
   3005   GenerateMiss(masm);
   3006 }
   3007 
   3008 
   3009 void CompareICStub::GenerateMiss(MacroAssembler* masm) {
   3010   {
   3011     // Call the runtime system in a fresh internal frame.
   3012     FrameScope scope(masm, StackFrame::INTERNAL);
   3013     __ push(edx);  // Preserve edx and eax.
   3014     __ push(eax);
   3015     __ push(edx);  // And also use them as the arguments.
   3016     __ push(eax);
   3017     __ push(Immediate(Smi::FromInt(op())));
   3018     __ CallRuntime(Runtime::kCompareIC_Miss);
   3019     // Compute the entry point of the rewritten stub.
   3020     __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
   3021     __ pop(eax);
   3022     __ pop(edx);
   3023   }
   3024 
   3025   // Do a tail call to the rewritten stub.
   3026   __ jmp(edi);
   3027 }
   3028 
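         // The miss handler returns the freshly compiled replacement stub as a
         // code object in eax; edi is pointed at its first instruction (one
         // header size past the object start) and we tail call it with the
         // original operands restored.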
   3029 
    3030 // Helper function used to check that the dictionary doesn't contain
    3031 // the property. This function may return false negatives, so the |miss|
    3032 // label must always call a backup property check that is complete.
    3033 // This function is safe to call if the receiver has fast properties.
    3034 // |name| must be a unique name and the receiver must be a heap object.
   3035 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
   3036                                                       Label* miss,
   3037                                                       Label* done,
   3038                                                       Register properties,
   3039                                                       Handle<Name> name,
   3040                                                       Register r0) {
   3041   DCHECK(name->IsUniqueName());
   3042 
    3043   // If the names of the slots probed for the hash value (probes 1 to
    3044   // kProbes - 1) are not equal to the name, and the kProbes-th slot is
    3045   // unused (its name is the undefined value), the hash table is
    3046   // guaranteed not to contain the property. This holds even if some slots
    3047   // represent deleted properties (their names are the hole value).
   3048   for (int i = 0; i < kInlinedProbes; i++) {
   3049     // Compute the masked index: (hash + i + i * i) & mask.
   3050     Register index = r0;
    3051     // Capacity is a smi and a power of two (2^n).
   3052     __ mov(index, FieldOperand(properties, kCapacityOffset));
   3053     __ dec(index);
   3054     __ and_(index,
   3055             Immediate(Smi::FromInt(name->Hash() +
   3056                                    NameDictionary::GetProbeOffset(i))));
   3057 
   3058     // Scale the index by multiplying by the entry size.
   3059     STATIC_ASSERT(NameDictionary::kEntrySize == 3);
   3060     __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
   3061     Register entity_name = r0;
    3062     // Finding undefined here means the name is not in the dictionary.
   3063     STATIC_ASSERT(kSmiTagSize == 1);
   3064     __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
   3065                                 kElementsStartOffset - kHeapObjectTag));
   3066     __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
   3067     __ j(equal, done);
   3068 
    3069     // Stop if we found the property.
   3070     __ cmp(entity_name, Handle<Name>(name));
   3071     __ j(equal, miss);
   3072 
   3073     Label good;
   3074     // Check for the hole and skip.
   3075     __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
   3076     __ j(equal, &good, Label::kNear);
   3077 
   3078     // Check if the entry name is not a unique name.
   3079     __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
   3080     __ JumpIfNotUniqueNameInstanceType(
   3081         FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
   3082     __ bind(&good);
   3083   }
   3084 
   3085   NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
   3086                                 NEGATIVE_LOOKUP);
   3087   __ push(Immediate(Handle<Object>(name)));
   3088   __ push(Immediate(name->Hash()));
   3089   __ CallStub(&stub);
   3090   __ test(r0, r0);
   3091   __ j(not_zero, miss);
   3092   __ jmp(done);
   3093 }
   3094 
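         // The probe sequence above is the dictionary's usual quadratic probing:
         // on probe i the slot index is (hash + GetProbeOffset(i)) & mask, with
         // mask = capacity - 1 (capacity is a power of two). Only kInlinedProbes
         // probes are emitted inline; the out-of-line stub handles the rest.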
   3095 
   3096 // Probe the name dictionary in the |elements| register. Jump to the
    3097 // |done| label if a property with the given name is found, leaving the
   3098 // index into the dictionary in |r0|. Jump to the |miss| label
   3099 // otherwise.
   3100 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
   3101                                                       Label* miss,
   3102                                                       Label* done,
   3103                                                       Register elements,
   3104                                                       Register name,
   3105                                                       Register r0,
   3106                                                       Register r1) {
   3107   DCHECK(!elements.is(r0));
   3108   DCHECK(!elements.is(r1));
   3109   DCHECK(!name.is(r0));
   3110   DCHECK(!name.is(r1));
   3111 
   3112   __ AssertName(name);
   3113 
   3114   __ mov(r1, FieldOperand(elements, kCapacityOffset));
   3115   __ shr(r1, kSmiTagSize);  // convert smi to int
   3116   __ dec(r1);
   3117 
   3118   // Generate an unrolled loop that performs a few probes before
   3119   // giving up. Measurements done on Gmail indicate that 2 probes
   3120   // cover ~93% of loads from dictionaries.
   3121   for (int i = 0; i < kInlinedProbes; i++) {
   3122     // Compute the masked index: (hash + i + i * i) & mask.
   3123     __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
   3124     __ shr(r0, Name::kHashShift);
   3125     if (i > 0) {
   3126       __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
   3127     }
   3128     __ and_(r0, r1);
   3129 
   3130     // Scale the index by multiplying by the entry size.
   3131     STATIC_ASSERT(NameDictionary::kEntrySize == 3);
   3132     __ lea(r0, Operand(r0, r0, times_2, 0));  // r0 = r0 * 3
   3133 
   3134     // Check if the key is identical to the name.
   3135     __ cmp(name, Operand(elements,
   3136                          r0,
   3137                          times_4,
   3138                          kElementsStartOffset - kHeapObjectTag));
   3139     __ j(equal, done);
   3140   }
   3141 
   3142   NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0,
   3143                                 POSITIVE_LOOKUP);
   3144   __ push(name);
   3145   __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
   3146   __ shr(r0, Name::kHashShift);
   3147   __ push(r0);
   3148   __ CallStub(&stub);
   3149 
   3150   __ test(r1, r1);
   3151   __ j(zero, miss);
   3152   __ jmp(done);
   3153 }
   3154 
   3155 
   3156 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
   3157   // This stub overrides SometimesSetsUpAFrame() to return false.  That means
   3158   // we cannot call anything that could cause a GC from this stub.
   3159   // Stack frame on entry:
   3160   //  esp[0 * kPointerSize]: return address.
   3161   //  esp[1 * kPointerSize]: key's hash.
   3162   //  esp[2 * kPointerSize]: key.
   3163   // Registers:
   3164   //  dictionary_: NameDictionary to probe.
   3165   //  result_: used as scratch.
    3166   //  index_: will hold the index of the entry if the lookup is successful;
    3167   //          may alias with result_.
    3168   // Returns:
    3169   //  result_ is zero if the lookup failed, non-zero otherwise.
   3170 
   3171   Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
   3172 
   3173   Register scratch = result();
   3174 
   3175   __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset));
   3176   __ dec(scratch);
   3177   __ SmiUntag(scratch);
   3178   __ push(scratch);
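           // Stack layout from here on:
           //  esp[0 * kPointerSize]: capacity mask (capacity - 1, untagged).
           //  esp[1 * kPointerSize]: return address.
           //  esp[2 * kPointerSize]: key's hash.
           //  esp[3 * kPointerSize]: key.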
   3179 
    3180   // If the names of the slots probed for the hash value (probes
    3181   // kInlinedProbes to kTotalProbes - 1) are not equal to the name, and an
    3182   // unused slot is found (its name is the undefined value), the hash table
    3183   // is guaranteed not to contain the property. This holds even if some
    3184   // slots represent deleted properties (their names are the hole value).
   3185   for (int i = kInlinedProbes; i < kTotalProbes; i++) {
   3186     // Compute the masked index: (hash + i + i * i) & mask.
   3187     __ mov(scratch, Operand(esp, 2 * kPointerSize));
   3188     if (i > 0) {
   3189       __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
   3190     }
   3191     __ and_(scratch, Operand(esp, 0));
   3192 
   3193     // Scale the index by multiplying by the entry size.
   3194     STATIC_ASSERT(NameDictionary::kEntrySize == 3);
   3195     __ lea(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.
   3196 
    3197     // Finding undefined here means the name is not in the dictionary.
   3198     STATIC_ASSERT(kSmiTagSize == 1);
   3199     __ mov(scratch, Operand(dictionary(), index(), times_pointer_size,
   3200                             kElementsStartOffset - kHeapObjectTag));
   3201     __ cmp(scratch, isolate()->factory()->undefined_value());
   3202     __ j(equal, &not_in_dictionary);
   3203 
    3204     // Stop if we found the property.
   3205     __ cmp(scratch, Operand(esp, 3 * kPointerSize));
   3206     __ j(equal, &in_dictionary);
   3207 
   3208     if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
    3209       // If we hit a key that is not a unique name during negative
    3210       // lookup, we have to bail out, as this key might be equal to the
    3211       // key we are looking for.
   3212 
   3213       // Check if the entry name is not a unique name.
   3214       __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
   3215       __ JumpIfNotUniqueNameInstanceType(
   3216           FieldOperand(scratch, Map::kInstanceTypeOffset),
   3217           &maybe_in_dictionary);
   3218     }
   3219   }
   3220 
   3221   __ bind(&maybe_in_dictionary);
    3222   // If we are doing a negative lookup, then probing failure should be
    3223   // treated as a lookup success. For a positive lookup, probing failure
    3224   // should be treated as lookup failure.
   3225   if (mode() == POSITIVE_LOOKUP) {
   3226     __ mov(result(), Immediate(0));
   3227     __ Drop(1);
   3228     __ ret(2 * kPointerSize);
   3229   }
   3230 
   3231   __ bind(&in_dictionary);
   3232   __ mov(result(), Immediate(1));
   3233   __ Drop(1);
   3234   __ ret(2 * kPointerSize);
   3235 
   3236   __ bind(&not_in_dictionary);
   3237   __ mov(result(), Immediate(0));
   3238   __ Drop(1);
   3239   __ ret(2 * kPointerSize);
   3240 }
   3241 
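         // Three exits: not_in_dictionary (result 0), in_dictionary (result 1),
         // and maybe_in_dictionary, which conservatively reports 1 for negative
         // lookups (the probed key might still equal the one we want) but 0 for
         // positive lookups.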
   3242 
   3243 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
   3244     Isolate* isolate) {
   3245   StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
   3246   stub.GetCode();
   3247   StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
   3248   stub2.GetCode();
   3249 }
   3250 
   3251 
    3252 // Takes the input in 3 registers: address_, value_ and object_. A pointer
    3253 // to the value has just been written into the object; now this stub makes
    3254 // sure we keep the GC informed. The word in the object where the value has
    3255 // been written is in the address register.
   3256 void RecordWriteStub::Generate(MacroAssembler* masm) {
   3257   Label skip_to_incremental_noncompacting;
   3258   Label skip_to_incremental_compacting;
   3259 
    3260   // The first two instructions are generated with labels so as to get the
    3261   // offset fixed up correctly by the bind(Label*) call. We patch them back
    3262   // and forth between compare instructions (which act as nops here) and the
    3263   // real branches when we start and stop incremental heap marking.
   3264   __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
   3265   __ jmp(&skip_to_incremental_compacting, Label::kFar);
   3266 
   3267   if (remembered_set_action() == EMIT_REMEMBERED_SET) {
   3268     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
   3269                            MacroAssembler::kReturnAtEnd);
   3270   } else {
   3271     __ ret(0);
   3272   }
   3273 
   3274   __ bind(&skip_to_incremental_noncompacting);
   3275   GenerateIncremental(masm, INCREMENTAL);
   3276 
   3277   __ bind(&skip_to_incremental_compacting);
   3278   GenerateIncremental(masm, INCREMENTAL_COMPACTION);
   3279 
   3280   // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
   3281   // Will be checked in IncrementalMarking::ActivateGeneratedStub.
   3282   masm->set_byte_at(0, kTwoByteNopInstruction);
   3283   masm->set_byte_at(2, kFiveByteNopInstruction);
   3284 }
   3285 
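         // The stub therefore has three patchable states (see the byte offsets
         // above):
         //   STORE_BUFFER_ONLY:      both jumps overwritten with the nop-like
         //                           compare instructions; only the remembered
         //                           set is updated.
         //   INCREMENTAL:            the 2-byte jump at offset 0 is live.
         //   INCREMENTAL_COMPACTION: the 5-byte jump at offset 2 is live.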
   3286 
   3287 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
   3288   regs_.Save(masm);
   3289 
   3290   if (remembered_set_action() == EMIT_REMEMBERED_SET) {
   3291     Label dont_need_remembered_set;
   3292 
   3293     __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
   3294     __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
   3295                            regs_.scratch0(),
   3296                            &dont_need_remembered_set);
   3297 
   3298     __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
   3299                         &dont_need_remembered_set);
   3300 
   3301     // First notify the incremental marker if necessary, then update the
   3302     // remembered set.
   3303     CheckNeedsToInformIncrementalMarker(
   3304         masm,
   3305         kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
   3306         mode);
   3307     InformIncrementalMarker(masm);
   3308     regs_.Restore(masm);
   3309     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
   3310                            MacroAssembler::kReturnAtEnd);
   3311 
   3312     __ bind(&dont_need_remembered_set);
   3313   }
   3314 
   3315   CheckNeedsToInformIncrementalMarker(
   3316       masm,
   3317       kReturnOnNoNeedToInformIncrementalMarker,
   3318       mode);
   3319   InformIncrementalMarker(masm);
   3320   regs_.Restore(masm);
   3321   __ ret(0);
   3322 }
   3323 
   3324 
   3325 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
   3326   regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
   3327   int argument_count = 3;
   3328   __ PrepareCallCFunction(argument_count, regs_.scratch0());
   3329   __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
   3330   __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
   3331   __ mov(Operand(esp, 2 * kPointerSize),
   3332          Immediate(ExternalReference::isolate_address(isolate())));
   3333 
   3334   AllowExternalCallThatCantCauseGC scope(masm);
   3335   __ CallCFunction(
   3336       ExternalReference::incremental_marking_record_write_function(isolate()),
   3337       argument_count);
   3338 
   3339   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
   3340 }
   3341 
   3342 
   3343 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
   3344     MacroAssembler* masm,
   3345     OnNoNeedToInformIncrementalMarker on_no_need,
   3346     Mode mode) {
   3347   Label object_is_black, need_incremental, need_incremental_pop_object;
   3348 
   3349   __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
   3350   __ and_(regs_.scratch0(), regs_.object());
   3351   __ mov(regs_.scratch1(),
   3352          Operand(regs_.scratch0(),
   3353                  MemoryChunk::kWriteBarrierCounterOffset));
   3354   __ sub(regs_.scratch1(), Immediate(1));
   3355   __ mov(Operand(regs_.scratch0(),
   3356                  MemoryChunk::kWriteBarrierCounterOffset),
   3357          regs_.scratch1());
   3358   __ j(negative, &need_incremental);
   3359 
    3360   // Let's look at the color of the object: if it is not black we don't
    3361   // have to inform the incremental marker.
   3362   __ JumpIfBlack(regs_.object(),
   3363                  regs_.scratch0(),
   3364                  regs_.scratch1(),
   3365                  &object_is_black,
   3366                  Label::kNear);
   3367 
   3368   regs_.Restore(masm);
   3369   if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
   3370     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
   3371                            MacroAssembler::kReturnAtEnd);
   3372   } else {
   3373     __ ret(0);
   3374   }
   3375 
   3376   __ bind(&object_is_black);
   3377 
   3378   // Get the value from the slot.
   3379   __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
   3380 
   3381   if (mode == INCREMENTAL_COMPACTION) {
   3382     Label ensure_not_white;
   3383 
   3384     __ CheckPageFlag(regs_.scratch0(),  // Contains value.
   3385                      regs_.scratch1(),  // Scratch.
   3386                      MemoryChunk::kEvacuationCandidateMask,
   3387                      zero,
   3388                      &ensure_not_white,
   3389                      Label::kNear);
   3390 
   3391     __ CheckPageFlag(regs_.object(),
   3392                      regs_.scratch1(),  // Scratch.
   3393                      MemoryChunk::kSkipEvacuationSlotsRecordingMask,
   3394                      not_zero,
   3395                      &ensure_not_white,
   3396                      Label::kNear);
   3397 
   3398     __ jmp(&need_incremental);
   3399 
   3400     __ bind(&ensure_not_white);
   3401   }
   3402 
   3403   // We need an extra register for this, so we push the object register
   3404   // temporarily.
   3405   __ push(regs_.object());
   3406   __ JumpIfWhite(regs_.scratch0(),  // The value.
   3407                  regs_.scratch1(),  // Scratch.
   3408                  regs_.object(),    // Scratch.
   3409                  &need_incremental_pop_object, Label::kNear);
   3410   __ pop(regs_.object());
   3411 
   3412   regs_.Restore(masm);
   3413   if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
   3414     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
   3415                            MacroAssembler::kReturnAtEnd);
   3416   } else {
   3417     __ ret(0);
   3418   }
   3419 
   3420   __ bind(&need_incremental_pop_object);
   3421   __ pop(regs_.object());
   3422 
   3423   __ bind(&need_incremental);
   3424 
   3425   // Fall through when we need to inform the incremental marker.
   3426 }
   3427 
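         // Summary of the ladder above: (1) decrement the page's write barrier
         // counter and go slow when it underflows; (2) if the object is not
         // black, the marker does not care about the store; (3) when compacting,
         // a value on an evacuation candidate page (unless the object's page
         // skips slot recording) forces the slow path; (4) finally, a white
         // value means we fall through so the caller can inform the marker.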
   3428 
   3429 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
   3430   CEntryStub ces(isolate(), 1, kSaveFPRegs);
   3431   __ call(ces.GetCode(), RelocInfo::CODE_TARGET);
   3432   int parameter_count_offset =
   3433       StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
   3434   __ mov(ebx, MemOperand(ebp, parameter_count_offset));
   3435   masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
   3436   __ pop(ecx);
   3437   int additional_offset =
   3438       function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
   3439   __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
   3440   __ jmp(ecx);  // Return to IC Miss stub, continuation still on stack.
   3441 }
   3442 
   3443 
   3444 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
   3445   __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   3446   LoadICStub stub(isolate());
   3447   stub.GenerateForTrampoline(masm);
   3448 }
   3449 
   3450 
   3451 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
   3452   __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   3453   KeyedLoadICStub stub(isolate());
   3454   stub.GenerateForTrampoline(masm);
   3455 }
   3456 
   3457 
   3458 static void HandleArrayCases(MacroAssembler* masm, Register receiver,
   3459                              Register key, Register vector, Register slot,
   3460                              Register feedback, bool is_polymorphic,
   3461                              Label* miss) {
   3462   // feedback initially contains the feedback array
   3463   Label next, next_loop, prepare_next;
   3464   Label load_smi_map, compare_map;
   3465   Label start_polymorphic;
   3466 
   3467   __ push(receiver);
   3468   __ push(vector);
   3469 
   3470   Register receiver_map = receiver;
   3471   Register cached_map = vector;
   3472 
   3473   // Receiver might not be a heap object.
   3474   __ JumpIfSmi(receiver, &load_smi_map);
   3475   __ mov(receiver_map, FieldOperand(receiver, 0));
   3476   __ bind(&compare_map);
   3477   __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
   3478 
    3479   // A named keyed load might have a two-element array; all other cases can
    3480   // count on an array with at least two {map, handler} pairs, so they can
    3481   // go right into polymorphic array handling.
   3482   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   3483   __ j(not_equal, is_polymorphic ? &start_polymorphic : &next);
   3484 
    3485   // Found a match; call the handler.
   3486   Register handler = feedback;
   3487   __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
   3488   __ pop(vector);
   3489   __ pop(receiver);
   3490   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
   3491   __ jmp(handler);
   3492 
   3493   if (!is_polymorphic) {
   3494     __ bind(&next);
   3495     __ cmp(FieldOperand(feedback, FixedArray::kLengthOffset),
   3496            Immediate(Smi::FromInt(2)));
   3497     __ j(not_equal, &start_polymorphic);
   3498     __ pop(vector);
   3499     __ pop(receiver);
   3500     __ jmp(miss);
   3501   }
   3502 
    3503   // Polymorphic; we have to loop from 2 to N.
   3504   __ bind(&start_polymorphic);
   3505   __ push(key);
   3506   Register counter = key;
   3507   __ mov(counter, Immediate(Smi::FromInt(2)));
   3508   __ bind(&next_loop);
   3509   __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
   3510                                   FixedArray::kHeaderSize));
   3511   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   3512   __ j(not_equal, &prepare_next);
   3513   __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
   3514                                FixedArray::kHeaderSize + kPointerSize));
   3515   __ pop(key);
   3516   __ pop(vector);
   3517   __ pop(receiver);
   3518   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
   3519   __ jmp(handler);
   3520 
   3521   __ bind(&prepare_next);
   3522   __ add(counter, Immediate(Smi::FromInt(2)));
   3523   __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
   3524   __ j(less, &next_loop);
   3525 
   3526   // We exhausted our array of map handler pairs.
   3527   __ pop(key);
   3528   __ pop(vector);
   3529   __ pop(receiver);
   3530   __ jmp(miss);
   3531 
   3532   __ bind(&load_smi_map);
   3533   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
   3534   __ jmp(&compare_map);
   3535 }
   3536 
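         // The feedback fixed array is laid out as {map, handler} pairs:
         //   [ weak_cell(map0), handler0, weak_cell(map1), handler1, ... ]
         // which is why the counter starts at 2 and steps by 2 (the keyed store
         // variant further below loops from 0). The counter is a smi, so
         // times_half_pointer_size rescales the tagged value to a byte offset
         // (Smi(n) == n << 1 on ia32).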
   3537 
   3538 static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
   3539                                   Register key, Register vector, Register slot,
   3540                                   Register weak_cell, Label* miss) {
   3541   // feedback initially contains the feedback array
   3542   Label compare_smi_map;
   3543 
    3544   // Load the map out of the weak cell into the weak_cell register.
   3545   Register ic_map = weak_cell;
   3546   __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));
   3547 
   3548   // Receiver might not be a heap object.
   3549   __ JumpIfSmi(receiver, &compare_smi_map);
   3550   __ cmp(ic_map, FieldOperand(receiver, 0));
   3551   __ j(not_equal, miss);
   3552   Register handler = weak_cell;
   3553   __ mov(handler, FieldOperand(vector, slot, times_half_pointer_size,
   3554                                FixedArray::kHeaderSize + kPointerSize));
   3555   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
   3556   __ jmp(handler);
   3557 
   3558   // In microbenchmarks, it made sense to unroll this code so that the call to
   3559   // the handler is duplicated for a HeapObject receiver and a Smi receiver.
   3560   __ bind(&compare_smi_map);
   3561   __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
   3562   __ j(not_equal, miss);
   3563   __ mov(handler, FieldOperand(vector, slot, times_half_pointer_size,
   3564                                FixedArray::kHeaderSize + kPointerSize));
   3565   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
   3566   __ jmp(handler);
   3567 }
   3568 
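         // Smi receivers have no map word, so the feedback code stands the heap
         // number map in for them: a smi matches an entry whose cached map is
         // the heap number map (see the compare_smi_map paths).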
   3569 
   3570 void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
   3571 
   3572 
   3573 void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
   3574   GenerateImpl(masm, true);
   3575 }
   3576 
   3577 
   3578 void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
   3579   Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // edx
   3580   Register name = LoadWithVectorDescriptor::NameRegister();          // ecx
   3581   Register vector = LoadWithVectorDescriptor::VectorRegister();      // ebx
   3582   Register slot = LoadWithVectorDescriptor::SlotRegister();          // eax
   3583   Register scratch = edi;
   3584   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
   3585                                FixedArray::kHeaderSize));
   3586 
   3587   // Is it a weak cell?
   3588   Label try_array;
   3589   Label not_array, smi_key, key_okay, miss;
   3590   __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
   3591   __ j(not_equal, &try_array);
   3592   HandleMonomorphicCase(masm, receiver, name, vector, slot, scratch, &miss);
   3593 
   3594   // Is it a fixed array?
   3595   __ bind(&try_array);
   3596   __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
   3597   __ j(not_equal, &not_array);
   3598   HandleArrayCases(masm, receiver, name, vector, slot, scratch, true, &miss);
   3599 
   3600   __ bind(&not_array);
   3601   __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
   3602   __ j(not_equal, &miss);
   3603   __ push(slot);
   3604   __ push(vector);
   3605   Code::Flags code_flags =
   3606       Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC));
   3607   masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags,
   3608                                                receiver, name, vector, scratch);
   3609   __ pop(vector);
   3610   __ pop(slot);
   3611 
   3612   __ bind(&miss);
   3613   LoadIC::GenerateMiss(masm);
   3614 }
   3615 
   3616 
   3617 void KeyedLoadICStub::Generate(MacroAssembler* masm) {
   3618   GenerateImpl(masm, false);
   3619 }
   3620 
   3621 
   3622 void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
   3623   GenerateImpl(masm, true);
   3624 }
   3625 
   3626 
   3627 void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
   3628   Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // edx
   3629   Register key = LoadWithVectorDescriptor::NameRegister();           // ecx
   3630   Register vector = LoadWithVectorDescriptor::VectorRegister();      // ebx
   3631   Register slot = LoadWithVectorDescriptor::SlotRegister();          // eax
   3632   Register feedback = edi;
   3633   __ mov(feedback, FieldOperand(vector, slot, times_half_pointer_size,
   3634                                 FixedArray::kHeaderSize));
   3635   // Is it a weak cell?
   3636   Label try_array;
   3637   Label not_array, smi_key, key_okay, miss;
   3638   __ CompareRoot(FieldOperand(feedback, 0), Heap::kWeakCellMapRootIndex);
   3639   __ j(not_equal, &try_array);
   3640   HandleMonomorphicCase(masm, receiver, key, vector, slot, feedback, &miss);
   3641 
   3642   __ bind(&try_array);
   3643   // Is it a fixed array?
   3644   __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
   3645   __ j(not_equal, &not_array);
   3646 
   3647   // We have a polymorphic element handler.
   3648   Label polymorphic, try_poly_name;
   3649   __ bind(&polymorphic);
   3650   HandleArrayCases(masm, receiver, key, vector, slot, feedback, true, &miss);
   3651 
   3652   __ bind(&not_array);
   3653   // Is it generic?
   3654   __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
   3655   __ j(not_equal, &try_poly_name);
   3656   Handle<Code> megamorphic_stub =
   3657       KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
   3658   __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
   3659 
   3660   __ bind(&try_poly_name);
   3661   // We might have a name in feedback, and a fixed array in the next slot.
   3662   __ cmp(key, feedback);
   3663   __ j(not_equal, &miss);
   3664   // If the name comparison succeeded, we know we have a fixed array with
   3665   // at least one map/handler pair.
   3666   __ mov(feedback, FieldOperand(vector, slot, times_half_pointer_size,
   3667                                 FixedArray::kHeaderSize + kPointerSize));
   3668   HandleArrayCases(masm, receiver, key, vector, slot, feedback, false, &miss);
   3669 
   3670   __ bind(&miss);
   3671   KeyedLoadIC::GenerateMiss(masm);
   3672 }
   3673 
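         // Keyed load dispatch order on the feedback slot: weak cell
         // (monomorphic) -> fixed array (polymorphic handlers) -> megamorphic
         // symbol (generic stub) -> a cached property name, whose {map, handler}
         // array lives in the next slot.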
   3674 
   3675 void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
   3676   __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   3677   VectorStoreICStub stub(isolate(), state());
   3678   stub.GenerateForTrampoline(masm);
   3679 }
   3680 
   3681 
   3682 void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
   3683   __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   3684   VectorKeyedStoreICStub stub(isolate(), state());
   3685   stub.GenerateForTrampoline(masm);
   3686 }
   3687 
   3688 
   3689 void VectorStoreICStub::Generate(MacroAssembler* masm) {
   3690   GenerateImpl(masm, false);
   3691 }
   3692 
   3693 
   3694 void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
   3695   GenerateImpl(masm, true);
   3696 }
   3697 
   3698 
    3699 // The value is on the stack already.
   3700 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register receiver,
   3701                                        Register key, Register vector,
   3702                                        Register slot, Register feedback,
   3703                                        bool is_polymorphic, Label* miss) {
   3704   // feedback initially contains the feedback array
   3705   Label next, next_loop, prepare_next;
   3706   Label load_smi_map, compare_map;
   3707   Label start_polymorphic;
   3708   Label pop_and_miss;
   3709   ExternalReference virtual_register =
   3710       ExternalReference::virtual_handler_register(masm->isolate());
   3711 
   3712   __ push(receiver);
   3713   __ push(vector);
   3714 
   3715   Register receiver_map = receiver;
   3716   Register cached_map = vector;
   3717 
   3718   // Receiver might not be a heap object.
   3719   __ JumpIfSmi(receiver, &load_smi_map);
   3720   __ mov(receiver_map, FieldOperand(receiver, 0));
   3721   __ bind(&compare_map);
   3722   __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
   3723 
    3724   // A named keyed store might have a two-element array; all other cases
    3725   // can count on an array with at least two {map, handler} pairs, so they
    3726   // can go right into polymorphic array handling.
   3727   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   3728   __ j(not_equal, &start_polymorphic);
   3729 
    3730   // Found a match; call the handler.
   3731   Register handler = feedback;
   3732   DCHECK(handler.is(VectorStoreICDescriptor::ValueRegister()));
   3733   __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
   3734   __ pop(vector);
   3735   __ pop(receiver);
   3736   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
   3737   __ mov(Operand::StaticVariable(virtual_register), handler);
   3738   __ pop(handler);  // Pop "value".
   3739   __ jmp(Operand::StaticVariable(virtual_register));
   3740 
    3741   // Polymorphic; we have to loop from 2 to N.
   3742   __ bind(&start_polymorphic);
   3743   __ push(key);
   3744   Register counter = key;
   3745   __ mov(counter, Immediate(Smi::FromInt(2)));
   3746 
   3747   if (!is_polymorphic) {
    3748     // If is_polymorphic is false, we may only have a two-element array.
    3749     // Check against the length now in that case.
   3750     __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
   3751     __ j(greater_equal, &pop_and_miss);
   3752   }
   3753 
   3754   __ bind(&next_loop);
   3755   __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
   3756                                   FixedArray::kHeaderSize));
   3757   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   3758   __ j(not_equal, &prepare_next);
   3759   __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
   3760                                FixedArray::kHeaderSize + kPointerSize));
   3761   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
   3762   __ pop(key);
   3763   __ pop(vector);
   3764   __ pop(receiver);
   3765   __ mov(Operand::StaticVariable(virtual_register), handler);
   3766   __ pop(handler);  // Pop "value".
   3767   __ jmp(Operand::StaticVariable(virtual_register));
   3768 
   3769   __ bind(&prepare_next);
   3770   __ add(counter, Immediate(Smi::FromInt(2)));
   3771   __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
   3772   __ j(less, &next_loop);
   3773 
   3774   // We exhausted our array of map handler pairs.
   3775   __ bind(&pop_and_miss);
   3776   __ pop(key);
   3777   __ pop(vector);
   3778   __ pop(receiver);
   3779   __ jmp(miss);
   3780 
   3781   __ bind(&load_smi_map);
   3782   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
   3783   __ jmp(&compare_map);
   3784 }
   3785 
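         // Unlike the load cases, every register is live here (the value had to
         // be pushed to free one up), so the handler entry point is parked in
         // the isolate's virtual_handler_register and the final jump goes
         // through that memory operand rather than a register.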
   3786 
   3787 static void HandleMonomorphicStoreCase(MacroAssembler* masm, Register receiver,
   3788                                        Register key, Register vector,
   3789                                        Register slot, Register weak_cell,
   3790                                        Label* miss) {
   3791   // The store ic value is on the stack.
   3792   DCHECK(weak_cell.is(VectorStoreICDescriptor::ValueRegister()));
   3793   ExternalReference virtual_register =
   3794       ExternalReference::virtual_handler_register(masm->isolate());
   3795 
   3796   // feedback initially contains the feedback array
   3797   Label compare_smi_map;
   3798 
    3799   // Load the map out of the weak cell into the weak_cell register.
   3800   Register ic_map = weak_cell;
   3801   __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));
   3802 
   3803   // Receiver might not be a heap object.
   3804   __ JumpIfSmi(receiver, &compare_smi_map);
   3805   __ cmp(ic_map, FieldOperand(receiver, 0));
   3806   __ j(not_equal, miss);
   3807   __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
   3808                                  FixedArray::kHeaderSize + kPointerSize));
   3809   __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
    3810   // Put the store ic value back in its register.
   3811   __ mov(Operand::StaticVariable(virtual_register), weak_cell);
   3812   __ pop(weak_cell);  // Pop "value".
    3813   // Jump to the handler.
   3814   __ jmp(Operand::StaticVariable(virtual_register));
   3815 
   3816   // In microbenchmarks, it made sense to unroll this code so that the call to
   3817   // the handler is duplicated for a HeapObject receiver and a Smi receiver.
   3818   __ bind(&compare_smi_map);
   3819   __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
   3820   __ j(not_equal, miss);
   3821   __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
   3822                                  FixedArray::kHeaderSize + kPointerSize));
   3823   __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
   3824   __ mov(Operand::StaticVariable(virtual_register), weak_cell);
   3825   __ pop(weak_cell);  // Pop "value".
    3826   // Jump to the handler.
   3827   __ jmp(Operand::StaticVariable(virtual_register));
   3828 }
   3829 
   3830 
   3831 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
   3832   Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // edx
   3833   Register key = VectorStoreICDescriptor::NameRegister();           // ecx
   3834   Register value = VectorStoreICDescriptor::ValueRegister();        // eax
   3835   Register vector = VectorStoreICDescriptor::VectorRegister();      // ebx
   3836   Register slot = VectorStoreICDescriptor::SlotRegister();          // edi
   3837   Label miss;
   3838 
   3839   __ push(value);
   3840 
   3841   Register scratch = value;
   3842   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
   3843                                FixedArray::kHeaderSize));
   3844 
   3845   // Is it a weak cell?
   3846   Label try_array;
   3847   Label not_array, smi_key, key_okay;
   3848   __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
   3849   __ j(not_equal, &try_array);
   3850   HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
   3851 
   3852   // Is it a fixed array?
   3853   __ bind(&try_array);
   3854   __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
   3855   __ j(not_equal, &not_array);
   3856   HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, true,
   3857                              &miss);
   3858 
   3859   __ bind(&not_array);
   3860   __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
   3861   __ j(not_equal, &miss);
   3862 
   3863   __ pop(value);
   3864   __ push(slot);
   3865   __ push(vector);
   3866   Code::Flags code_flags =
   3867       Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::STORE_IC));
   3868   masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags,
   3869                                                receiver, key, slot, no_reg);
   3870   __ pop(vector);
   3871   __ pop(slot);
   3872   Label no_pop_miss;
   3873   __ jmp(&no_pop_miss);
   3874 
   3875   __ bind(&miss);
   3876   __ pop(value);
   3877   __ bind(&no_pop_miss);
   3878   StoreIC::GenerateMiss(masm);
   3879 }
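         // Dispatch summary for the stub above (a reading aid, not new logic):
         //   1. feedback is a WeakCell         -> monomorphic map check + handler jump
         //   2. feedback is a FixedArray       -> polymorphic walk over map/handler pairs
         //   3. feedback is megamorphic_symbol -> probe the stub cache
         //   4. anything else                  -> miss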
   3880 
   3881 
   3882 void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
   3883   GenerateImpl(masm, false);
   3884 }
   3885 
   3886 
   3887 void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
   3888   GenerateImpl(masm, true);
   3889 }
   3890 
   3891 
   3892 static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
   3893                                             Register receiver, Register key,
   3894                                             Register vector, Register slot,
   3895                                             Register feedback, Label* miss) {
   3896   // feedback initially contains the feedback array
   3897   Label next, next_loop, prepare_next;
   3898   Label load_smi_map, compare_map;
   3899   Label transition_call;
   3900   Label pop_and_miss;
   3901   ExternalReference virtual_register =
   3902       ExternalReference::virtual_handler_register(masm->isolate());
   3903   ExternalReference virtual_slot =
   3904       ExternalReference::virtual_slot_register(masm->isolate());
   3905 
   3906   __ push(receiver);
   3907   __ push(vector);
   3908 
   3909   Register receiver_map = receiver;
   3910   Register cached_map = vector;
   3911   Register value = StoreDescriptor::ValueRegister();
   3912 
   3913   // Receiver might not be a heap object.
   3914   __ JumpIfSmi(receiver, &load_smi_map);
   3915   __ mov(receiver_map, FieldOperand(receiver, 0));
   3916   __ bind(&compare_map);
   3917 
   3918   // Polymorphic, we have to loop from 0 to N - 1
   3919   __ push(key);
   3920   // Current stack layout:
   3921   // - esp[0]    -- key
   3922   // - esp[4]    -- vector
   3923   // - esp[8]    -- receiver
   3924   // - esp[12]   -- value
   3925   // - esp[16]   -- return address
   3926   //
   3927   // Required stack layout for handler call:
   3928   // - esp[0]    -- return address
   3929   // - receiver, key, value, vector, slot in registers.
   3930   // - handler in virtual register.
   3931   Register counter = key;
   3932   __ mov(counter, Immediate(Smi::FromInt(0)));
   3933   __ bind(&next_loop);
   3934   __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
   3935                                   FixedArray::kHeaderSize));
   3936   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   3937   __ j(not_equal, &prepare_next);
   3938   __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
   3939                                   FixedArray::kHeaderSize + kPointerSize));
   3940   __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
   3941   __ j(not_equal, &transition_call);
   3942   __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
   3943                                 FixedArray::kHeaderSize + 2 * kPointerSize));
   3944   __ pop(key);
   3945   __ pop(vector);
   3946   __ pop(receiver);
   3947   __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
   3948   __ mov(Operand::StaticVariable(virtual_register), feedback);
   3949   __ pop(value);
   3950   __ jmp(Operand::StaticVariable(virtual_register));
   3951 
   3952   __ bind(&transition_call);
   3953   // Current stack layout:
   3954   // - esp[0]    -- key
   3955   // - esp[4]    -- vector
   3956   // - esp[8]    -- receiver
   3957   // - esp[12]   -- value
   3958   // - esp[16]   -- return address
   3959   //
   3960   // Required stack layout for handler call:
   3961   // - esp[0]    -- return address
   3962   // - receiver, key, value, map, vector in registers.
   3963   // - handler and slot in virtual registers.
   3964   __ mov(Operand::StaticVariable(virtual_slot), slot);
   3965   __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
   3966                                 FixedArray::kHeaderSize + 2 * kPointerSize));
   3967   __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
   3968   __ mov(Operand::StaticVariable(virtual_register), feedback);
   3969 
   3970   __ mov(cached_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   3971   // The weak cell may have been cleared.
   3972   __ JumpIfSmi(cached_map, &pop_and_miss);
   3973   DCHECK(!cached_map.is(VectorStoreTransitionDescriptor::MapRegister()));
   3974   __ mov(VectorStoreTransitionDescriptor::MapRegister(), cached_map);
   3975 
   3976   // Pop key into place.
   3977   __ pop(key);
   3978   __ pop(vector);
   3979   __ pop(receiver);
   3980   __ pop(value);
   3981   __ jmp(Operand::StaticVariable(virtual_register));
   3982 
   3983   __ bind(&prepare_next);
   3984   __ add(counter, Immediate(Smi::FromInt(3)));
   3985   __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
   3986   __ j(less, &next_loop);
   3987 
   3988   // We exhausted our array of map handler pairs.
   3989   __ bind(&pop_and_miss);
   3990   __ pop(key);
   3991   __ pop(vector);
   3992   __ pop(receiver);
   3993   __ jmp(miss);
   3994 
   3995   __ bind(&load_smi_map);
   3996   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
   3997   __ jmp(&compare_map);
   3998 }
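         // The loop above assumes the keyed-store feedback array is a sequence of
         // triples, hence the counter advancing by Smi::FromInt(3):
         //
         //   feedback[i]     : WeakCell holding a receiver map
         //   feedback[i + 1] : WeakCell holding a transition map, or undefined
         //   feedback[i + 2] : handler Code object
         //
         // A non-undefined middle entry routes through &transition_call, which also
         // passes the slot to the handler via the virtual slot register.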
   3999 
   4000 
   4001 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
   4002   Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // edx
   4003   Register key = VectorStoreICDescriptor::NameRegister();           // ecx
   4004   Register value = VectorStoreICDescriptor::ValueRegister();        // eax
   4005   Register vector = VectorStoreICDescriptor::VectorRegister();      // ebx
   4006   Register slot = VectorStoreICDescriptor::SlotRegister();          // edi
   4007   Label miss;
   4008 
   4009   __ push(value);
   4010 
   4011   Register scratch = value;
   4012   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
   4013                                FixedArray::kHeaderSize));
   4014 
   4015   // Is it a weak cell?
   4016   Label try_array;
   4017   Label not_array, smi_key, key_okay;
   4018   __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
   4019   __ j(not_equal, &try_array);
   4020   HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
   4021 
   4022   // Is it a fixed array?
   4023   __ bind(&try_array);
   4024   __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
   4025   __ j(not_equal, &not_array);
   4026   HandlePolymorphicKeyedStoreCase(masm, receiver, key, vector, slot, scratch,
   4027                                   &miss);
   4028 
   4029   __ bind(&not_array);
   4030   Label try_poly_name;
   4031   __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
   4032   __ j(not_equal, &try_poly_name);
   4033 
   4034   __ pop(value);
   4035 
   4036   Handle<Code> megamorphic_stub =
   4037       KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
   4038   __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
   4039 
   4040   __ bind(&try_poly_name);
   4041   // We might have a name in feedback, and a fixed array in the next slot.
   4042   __ cmp(key, scratch);
   4043   __ j(not_equal, &miss);
   4044   // If the name comparison succeeded, we know we have a fixed array with
   4045   // at least one map/handler pair.
   4046   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
   4047                                FixedArray::kHeaderSize + kPointerSize));
   4048   HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, false,
   4049                              &miss);
   4050 
   4051   __ bind(&miss);
   4052   __ pop(value);
   4053   KeyedStoreIC::GenerateMiss(masm);
   4054 }
   4055 
   4056 
   4057 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
   4058   __ EmitLoadTypeFeedbackVector(ebx);
   4059   CallICStub stub(isolate(), state());
   4060   __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
   4061 }
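         // The trampoline variant exists for call sites that do not yet have the
         // type feedback vector in a register: it loads the vector into ebx itself
         // and then tail-jumps to the real CallICStub.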
   4062 
   4063 
   4064 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   4065   if (masm->isolate()->function_entry_hook() != NULL) {
   4066     ProfileEntryHookStub stub(masm->isolate());
   4067     masm->CallStub(&stub);
   4068   }
   4069 }
   4070 
   4071 
   4072 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
   4073   // Save volatile registers.
   4074   const int kNumSavedRegisters = 3;
   4075   __ push(eax);
   4076   __ push(ecx);
   4077   __ push(edx);
   4078 
   4079   // Calculate and push the original stack pointer.
   4080   __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
   4081   __ push(eax);
   4082 
   4083   // Retrieve our return address and use it to calculate the calling
   4084   // function's address.
   4085   __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
   4086   __ sub(eax, Immediate(Assembler::kCallInstructionLength));
   4087   __ push(eax);
   4088 
   4089   // Call the entry hook.
   4090   DCHECK(isolate()->function_entry_hook() != NULL);
   4091   __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
   4092           RelocInfo::RUNTIME_ENTRY);
   4093   __ add(esp, Immediate(2 * kPointerSize));
   4094 
   4095   // Restore ecx.
   4096   __ pop(edx);
   4097   __ pop(ecx);
   4098   __ pop(eax);
   4099 
   4100   __ ret(0);
   4101 }
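         // For reference, the hook called above is expected to match the v8.h
         // declaration (assuming the API of this era):
         //
         //   typedef void (*FunctionEntryHook)(uintptr_t function,
         //                                     uintptr_t return_addr_location);
         //
         // which is what the two words pushed before the call correspond to.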
   4102 
   4103 
   4104 template<class T>
   4105 static void CreateArrayDispatch(MacroAssembler* masm,
   4106                                 AllocationSiteOverrideMode mode) {
   4107   if (mode == DISABLE_ALLOCATION_SITES) {
   4108     T stub(masm->isolate(),
   4109            GetInitialFastElementsKind(),
   4110            mode);
   4111     __ TailCallStub(&stub);
   4112   } else if (mode == DONT_OVERRIDE) {
   4113     int last_index = GetSequenceIndexFromFastElementsKind(
   4114         TERMINAL_FAST_ELEMENTS_KIND);
   4115     for (int i = 0; i <= last_index; ++i) {
   4116       Label next;
   4117       ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
   4118       __ cmp(edx, kind);
   4119       __ j(not_equal, &next);
   4120       T stub(masm->isolate(), kind);
   4121       __ TailCallStub(&stub);
   4122       __ bind(&next);
   4123     }
   4124 
   4125     // If we reached this point there is a problem.
   4126     __ Abort(kUnexpectedElementsKindInArrayConstructor);
   4127   } else {
   4128     UNREACHABLE();
   4129   }
   4130 }
   4131 
   4132 
   4133 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
   4134                                            AllocationSiteOverrideMode mode) {
   4135   // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
   4136   // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
   4137   // eax - number of arguments
    4138   // edi - constructor
   4139   // esp[0] - return address
   4140   // esp[4] - last argument
   4141   Label normal_sequence;
   4142   if (mode == DONT_OVERRIDE) {
   4143     STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
   4144     STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
   4145     STATIC_ASSERT(FAST_ELEMENTS == 2);
   4146     STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
   4147     STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
   4148     STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
   4149 
    4150     // Is the low bit set? If so, we are holey and that is good.
   4151     __ test_b(edx, Immediate(1));
   4152     __ j(not_zero, &normal_sequence);
   4153   }
   4154 
    4155   // Look at the first argument.
   4156   __ mov(ecx, Operand(esp, kPointerSize));
   4157   __ test(ecx, ecx);
   4158   __ j(zero, &normal_sequence);
   4159 
   4160   if (mode == DISABLE_ALLOCATION_SITES) {
   4161     ElementsKind initial = GetInitialFastElementsKind();
   4162     ElementsKind holey_initial = GetHoleyElementsKind(initial);
   4163 
   4164     ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
   4165                                                   holey_initial,
   4166                                                   DISABLE_ALLOCATION_SITES);
   4167     __ TailCallStub(&stub_holey);
   4168 
   4169     __ bind(&normal_sequence);
   4170     ArraySingleArgumentConstructorStub stub(masm->isolate(),
   4171                                             initial,
   4172                                             DISABLE_ALLOCATION_SITES);
   4173     __ TailCallStub(&stub);
   4174   } else if (mode == DONT_OVERRIDE) {
   4175     // We are going to create a holey array, but our kind is non-holey.
   4176     // Fix kind and retry.
   4177     __ inc(edx);
   4178 
   4179     if (FLAG_debug_code) {
   4180       Handle<Map> allocation_site_map =
   4181           masm->isolate()->factory()->allocation_site_map();
   4182       __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
   4183       __ Assert(equal, kExpectedAllocationSite);
   4184     }
   4185 
    4186     // Save the resulting elements kind in type info. We can't just store edx
    4187     // in the AllocationSite::transition_info field, because elements kind is
    4188     // restricted to a portion of the field; the upper bits need to be left alone.
   4189     STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
   4190     __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset),
   4191            Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
   4192 
   4193     __ bind(&normal_sequence);
   4194     int last_index = GetSequenceIndexFromFastElementsKind(
   4195         TERMINAL_FAST_ELEMENTS_KIND);
   4196     for (int i = 0; i <= last_index; ++i) {
   4197       Label next;
   4198       ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
   4199       __ cmp(edx, kind);
   4200       __ j(not_equal, &next);
   4201       ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
   4202       __ TailCallStub(&stub);
   4203       __ bind(&next);
   4204     }
   4205 
   4206     // If we reached this point there is a problem.
   4207     __ Abort(kUnexpectedElementsKindInArrayConstructor);
   4208   } else {
   4209     UNREACHABLE();
   4210   }
   4211 }
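         // The packed->holey trick in the function above relies on the ElementsKind
         // numbering asserted at its top: each holey kind is its packed twin plus
         // one, i.e. has the low bit set:
         //
         //   FAST_SMI_ELEMENTS (0)    -> FAST_HOLEY_SMI_ELEMENTS (1)
         //   FAST_ELEMENTS (2)        -> FAST_HOLEY_ELEMENTS (3)
         //   FAST_DOUBLE_ELEMENTS (4) -> FAST_HOLEY_DOUBLE_ELEMENTS (5)
         //
         // so test_b(edx, 1) detects holeyness and inc(edx) performs the transition.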
   4212 
   4213 
   4214 template<class T>
   4215 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
   4216   int to_index = GetSequenceIndexFromFastElementsKind(
   4217       TERMINAL_FAST_ELEMENTS_KIND);
   4218   for (int i = 0; i <= to_index; ++i) {
   4219     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
   4220     T stub(isolate, kind);
   4221     stub.GetCode();
   4222     if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
   4223       T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
   4224       stub1.GetCode();
   4225     }
   4226   }
   4227 }
   4228 
   4229 void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   4230   ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
   4231       isolate);
   4232   ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
   4233       isolate);
   4234   ArrayNArgumentsConstructorStub stub(isolate);
   4235   stub.GetCode();
   4236 
   4237   ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
   4238   for (int i = 0; i < 2; i++) {
    4239     // For internal arrays we only need a few things.
   4240     InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
   4241     stubh1.GetCode();
   4242     InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
   4243     stubh2.GetCode();
   4244   }
   4245 }
   4246 
   4247 
   4248 void ArrayConstructorStub::GenerateDispatchToArrayStub(
   4249     MacroAssembler* masm,
   4250     AllocationSiteOverrideMode mode) {
   4251   if (argument_count() == ANY) {
   4252     Label not_zero_case, not_one_case;
   4253     __ test(eax, eax);
   4254     __ j(not_zero, &not_zero_case);
   4255     CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
   4256 
   4257     __ bind(&not_zero_case);
   4258     __ cmp(eax, 1);
   4259     __ j(greater, &not_one_case);
   4260     CreateArrayDispatchOneArgument(masm, mode);
   4261 
   4262     __ bind(&not_one_case);
   4263     ArrayNArgumentsConstructorStub stub(masm->isolate());
   4264     __ TailCallStub(&stub);
   4265   } else if (argument_count() == NONE) {
   4266     CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
   4267   } else if (argument_count() == ONE) {
   4268     CreateArrayDispatchOneArgument(masm, mode);
   4269   } else if (argument_count() == MORE_THAN_ONE) {
   4270     ArrayNArgumentsConstructorStub stub(masm->isolate());
   4271     __ TailCallStub(&stub);
   4272   } else {
   4273     UNREACHABLE();
   4274   }
   4275 }
   4276 
   4277 
   4278 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
   4279   // ----------- S t a t e -------------
   4280   //  -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE)
   4281   //  -- ebx : AllocationSite or undefined
   4282   //  -- edi : constructor
   4283   //  -- edx : Original constructor
   4284   //  -- esp[0] : return address
   4285   //  -- esp[4] : last argument
   4286   // -----------------------------------
   4287   if (FLAG_debug_code) {
   4288     // The array construct code is only set for the global and natives
   4289     // builtin Array functions which always have maps.
   4290 
   4291     // Initial map for the builtin Array function should be a map.
   4292     __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    4293     // This test will catch both a NULL pointer and a Smi.
   4294     __ test(ecx, Immediate(kSmiTagMask));
   4295     __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
   4296     __ CmpObjectType(ecx, MAP_TYPE, ecx);
   4297     __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
   4298 
   4299     // We should either have undefined in ebx or a valid AllocationSite
   4300     __ AssertUndefinedOrAllocationSite(ebx);
   4301   }
   4302 
   4303   Label subclassing;
   4304 
   4305   // Enter the context of the Array function.
   4306   __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
   4307 
   4308   __ cmp(edx, edi);
   4309   __ j(not_equal, &subclassing);
   4310 
   4311   Label no_info;
   4312   // If the feedback vector is the undefined value call an array constructor
   4313   // that doesn't use AllocationSites.
   4314   __ cmp(ebx, isolate()->factory()->undefined_value());
   4315   __ j(equal, &no_info);
   4316 
    4317   // Extract the elements kind from the low bits of the transition info.
   4318   __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
   4319   __ SmiUntag(edx);
   4320   STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
   4321   __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
   4322   GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
   4323 
   4324   __ bind(&no_info);
   4325   GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
   4326 
   4327   // Subclassing.
   4328   __ bind(&subclassing);
   4329   switch (argument_count()) {
   4330     case ANY:
   4331     case MORE_THAN_ONE:
   4332       __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
   4333       __ add(eax, Immediate(3));
   4334       break;
   4335     case NONE:
   4336       __ mov(Operand(esp, 1 * kPointerSize), edi);
   4337       __ mov(eax, Immediate(3));
   4338       break;
   4339     case ONE:
   4340       __ mov(Operand(esp, 2 * kPointerSize), edi);
   4341       __ mov(eax, Immediate(4));
   4342       break;
   4343   }
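           // At this point the constructor (edi) has been written into the receiver
           // slot and argc adjusted by 3; together with new.target (edx) and the
           // allocation site (ebx) pushed below, Runtime::kNewArray receives the
           // original arguments plus these three extras.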
   4344   __ PopReturnAddressTo(ecx);
   4345   __ Push(edx);
   4346   __ Push(ebx);
   4347   __ PushReturnAddressFrom(ecx);
   4348   __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
   4349 }
   4350 
   4351 
   4352 void InternalArrayConstructorStub::GenerateCase(
   4353     MacroAssembler* masm, ElementsKind kind) {
   4354   Label not_zero_case, not_one_case;
   4355   Label normal_sequence;
   4356 
   4357   __ test(eax, eax);
   4358   __ j(not_zero, &not_zero_case);
   4359   InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
   4360   __ TailCallStub(&stub0);
   4361 
   4362   __ bind(&not_zero_case);
   4363   __ cmp(eax, 1);
   4364   __ j(greater, &not_one_case);
   4365 
   4366   if (IsFastPackedElementsKind(kind)) {
    4367     // We might need to create a holey array, so
    4368     // look at the first argument.
   4369     __ mov(ecx, Operand(esp, kPointerSize));
   4370     __ test(ecx, ecx);
   4371     __ j(zero, &normal_sequence);
   4372 
   4373     InternalArraySingleArgumentConstructorStub
   4374         stub1_holey(isolate(), GetHoleyElementsKind(kind));
   4375     __ TailCallStub(&stub1_holey);
   4376   }
   4377 
   4378   __ bind(&normal_sequence);
   4379   InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
   4380   __ TailCallStub(&stub1);
   4381 
   4382   __ bind(&not_one_case);
   4383   ArrayNArgumentsConstructorStub stubN(isolate());
   4384   __ TailCallStub(&stubN);
   4385 }
   4386 
   4387 
   4388 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
   4389   // ----------- S t a t e -------------
   4390   //  -- eax : argc
   4391   //  -- edi : constructor
   4392   //  -- esp[0] : return address
   4393   //  -- esp[4] : last argument
   4394   // -----------------------------------
   4395 
   4396   if (FLAG_debug_code) {
   4397     // The array construct code is only set for the global and natives
   4398     // builtin Array functions which always have maps.
   4399 
   4400     // Initial map for the builtin Array function should be a map.
   4401     __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    4402     // This test will catch both a NULL pointer and a Smi.
   4403     __ test(ecx, Immediate(kSmiTagMask));
   4404     __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
   4405     __ CmpObjectType(ecx, MAP_TYPE, ecx);
   4406     __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
   4407   }
   4408 
   4409   // Figure out the right elements kind
   4410   __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
   4411 
    4412   // Load the map's "bit field 2" into |ecx|. We only need the first byte,
    4413   // but the following masking takes care of that anyway.
   4414   __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
   4415   // Retrieve elements_kind from bit field 2.
   4416   __ DecodeField<Map::ElementsKindBits>(ecx);
   4417 
   4418   if (FLAG_debug_code) {
   4419     Label done;
   4420     __ cmp(ecx, Immediate(FAST_ELEMENTS));
   4421     __ j(equal, &done);
   4422     __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
   4423     __ Assert(equal,
   4424               kInvalidElementsKindForInternalArrayOrInternalPackedArray);
   4425     __ bind(&done);
   4426   }
   4427 
   4428   Label fast_elements_case;
   4429   __ cmp(ecx, Immediate(FAST_ELEMENTS));
   4430   __ j(equal, &fast_elements_case);
   4431   GenerateCase(masm, FAST_HOLEY_ELEMENTS);
   4432 
   4433   __ bind(&fast_elements_case);
   4434   GenerateCase(masm, FAST_ELEMENTS);
   4435 }
   4436 
   4437 
   4438 void FastNewObjectStub::Generate(MacroAssembler* masm) {
   4439   // ----------- S t a t e -------------
   4440   //  -- edi    : target
   4441   //  -- edx    : new target
   4442   //  -- esi    : context
   4443   //  -- esp[0] : return address
   4444   // -----------------------------------
   4445   __ AssertFunction(edi);
   4446   __ AssertReceiver(edx);
   4447 
   4448   // Verify that the new target is a JSFunction.
   4449   Label new_object;
   4450   __ CmpObjectType(edx, JS_FUNCTION_TYPE, ebx);
   4451   __ j(not_equal, &new_object);
   4452 
   4453   // Load the initial map and verify that it's in fact a map.
   4454   __ mov(ecx, FieldOperand(edx, JSFunction::kPrototypeOrInitialMapOffset));
   4455   __ JumpIfSmi(ecx, &new_object);
   4456   __ CmpObjectType(ecx, MAP_TYPE, ebx);
   4457   __ j(not_equal, &new_object);
   4458 
   4459   // Fall back to runtime if the target differs from the new target's
   4460   // initial map constructor.
   4461   __ cmp(edi, FieldOperand(ecx, Map::kConstructorOrBackPointerOffset));
   4462   __ j(not_equal, &new_object);
   4463 
   4464   // Allocate the JSObject on the heap.
   4465   Label allocate, done_allocate;
   4466   __ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset));
   4467   __ lea(ebx, Operand(ebx, times_pointer_size, 0));
   4468   __ Allocate(ebx, eax, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
   4469   __ bind(&done_allocate);
   4470 
   4471   // Initialize the JSObject fields.
   4472   __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
   4473   __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
   4474          masm->isolate()->factory()->empty_fixed_array());
   4475   __ mov(FieldOperand(eax, JSObject::kElementsOffset),
   4476          masm->isolate()->factory()->empty_fixed_array());
   4477   STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
   4478   __ lea(ebx, FieldOperand(eax, JSObject::kHeaderSize));
   4479 
   4480   // ----------- S t a t e -------------
   4481   //  -- eax    : result (tagged)
   4482   //  -- ebx    : result fields (untagged)
   4483   //  -- edi    : result end (untagged)
   4484   //  -- ecx    : initial map
   4485   //  -- esi    : context
   4486   //  -- esp[0] : return address
   4487   // -----------------------------------
   4488 
   4489   // Perform in-object slack tracking if requested.
   4490   Label slack_tracking;
   4491   STATIC_ASSERT(Map::kNoSlackTracking == 0);
   4492   __ test(FieldOperand(ecx, Map::kBitField3Offset),
   4493           Immediate(Map::ConstructionCounter::kMask));
   4494   __ j(not_zero, &slack_tracking, Label::kNear);
   4495   {
   4496     // Initialize all in-object fields with undefined.
   4497     __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
   4498     __ InitializeFieldsWithFiller(ebx, edi, edx);
   4499     __ Ret();
   4500   }
   4501   __ bind(&slack_tracking);
   4502   {
   4503     // Decrease generous allocation count.
   4504     STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
   4505     __ sub(FieldOperand(ecx, Map::kBitField3Offset),
   4506            Immediate(1 << Map::ConstructionCounter::kShift));
   4507 
   4508     // Initialize the in-object fields with undefined.
   4509     __ movzx_b(edx, FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset));
   4510     __ neg(edx);
   4511     __ lea(edx, Operand(edi, edx, times_pointer_size, 0));
   4512     __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
   4513     __ InitializeFieldsWithFiller(ebx, edx, edi);
   4514 
   4515     // Initialize the remaining (reserved) fields with one pointer filler map.
   4516     __ movzx_b(edx, FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset));
   4517     __ lea(edx, Operand(ebx, edx, times_pointer_size, 0));
   4518     __ LoadRoot(edi, Heap::kOnePointerFillerMapRootIndex);
   4519     __ InitializeFieldsWithFiller(ebx, edx, edi);
   4520 
   4521     // Check if we can finalize the instance size.
   4522     Label finalize;
   4523     STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
   4524     __ test(FieldOperand(ecx, Map::kBitField3Offset),
   4525             Immediate(Map::ConstructionCounter::kMask));
   4526     __ j(zero, &finalize, Label::kNear);
   4527     __ Ret();
   4528 
   4529     // Finalize the instance size.
   4530     __ bind(&finalize);
   4531     {
   4532       FrameScope scope(masm, StackFrame::INTERNAL);
   4533       __ Push(eax);
   4534       __ Push(ecx);
   4535       __ CallRuntime(Runtime::kFinalizeInstanceSize);
   4536       __ Pop(eax);
   4537     }
   4538     __ Ret();
   4539   }
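           // In-object slack tracking in a nutshell: the construction counter in the
           // map's bit field 3 is decremented on each allocation, and unused fields
           // are pre-filled with the one-pointer filler map so the heap stays
           // walkable. Once the counter runs out, Runtime::kFinalizeInstanceSize can
           // shrink the instance size to what is actually used.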
   4540 
   4541   // Fall back to %AllocateInNewSpace.
   4542   __ bind(&allocate);
   4543   {
   4544     FrameScope scope(masm, StackFrame::INTERNAL);
   4545     __ SmiTag(ebx);
   4546     __ Push(ecx);
   4547     __ Push(ebx);
   4548     __ CallRuntime(Runtime::kAllocateInNewSpace);
   4549     __ Pop(ecx);
   4550   }
   4551   __ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset));
   4552   __ lea(edi, Operand(eax, ebx, times_pointer_size, 0));
   4553   STATIC_ASSERT(kHeapObjectTag == 1);
   4554   __ dec(edi);
   4555   __ jmp(&done_allocate);
   4556 
   4557   // Fall back to %NewObject.
   4558   __ bind(&new_object);
   4559   __ PopReturnAddressTo(ecx);
   4560   __ Push(edi);
   4561   __ Push(edx);
   4562   __ PushReturnAddressFrom(ecx);
   4563   __ TailCallRuntime(Runtime::kNewObject);
   4564 }
   4565 
   4566 
   4567 void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
   4568   // ----------- S t a t e -------------
   4569   //  -- edi    : function
   4570   //  -- esi    : context
   4571   //  -- ebp    : frame pointer
   4572   //  -- esp[0] : return address
   4573   // -----------------------------------
   4574   __ AssertFunction(edi);
   4575 
   4576   // Make edx point to the JavaScript frame.
   4577   __ mov(edx, ebp);
   4578   if (skip_stub_frame()) {
   4579     // For Ignition we need to skip the handler/stub frame to reach the
   4580     // JavaScript frame for the function.
   4581     __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
   4582   }
   4583   if (FLAG_debug_code) {
   4584     Label ok;
   4585     __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset));
   4586     __ j(equal, &ok);
   4587     __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
   4588     __ bind(&ok);
   4589   }
   4590 
   4591   // Check if we have rest parameters (only possible if we have an
   4592   // arguments adaptor frame below the function frame).
   4593   Label no_rest_parameters;
   4594   __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
   4595   __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
   4596          Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   4597   __ j(not_equal, &no_rest_parameters, Label::kNear);
   4598 
   4599   // Check if the arguments adaptor frame contains more arguments than
   4600   // specified by the function's internal formal parameter count.
   4601   Label rest_parameters;
   4602   __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
   4603   __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
   4604   __ sub(eax,
   4605          FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
   4606   __ j(greater, &rest_parameters);
   4607 
   4608   // Return an empty rest parameter array.
   4609   __ bind(&no_rest_parameters);
   4610   {
   4611     // ----------- S t a t e -------------
   4612     //  -- esi    : context
   4613     //  -- esp[0] : return address
   4614     // -----------------------------------
   4615 
   4616     // Allocate an empty rest parameter array.
   4617     Label allocate, done_allocate;
   4618     __ Allocate(JSArray::kSize, eax, edx, ecx, &allocate, NO_ALLOCATION_FLAGS);
   4619     __ bind(&done_allocate);
   4620 
    4621     // Setup the rest parameter array in eax.
   4622     __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx);
   4623     __ mov(FieldOperand(eax, JSArray::kMapOffset), ecx);
   4624     __ mov(ecx, isolate()->factory()->empty_fixed_array());
   4625     __ mov(FieldOperand(eax, JSArray::kPropertiesOffset), ecx);
   4626     __ mov(FieldOperand(eax, JSArray::kElementsOffset), ecx);
   4627     __ mov(FieldOperand(eax, JSArray::kLengthOffset),
   4628            Immediate(Smi::FromInt(0)));
   4629     STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
   4630     __ Ret();
   4631 
   4632     // Fall back to %AllocateInNewSpace.
   4633     __ bind(&allocate);
   4634     {
   4635       FrameScope scope(masm, StackFrame::INTERNAL);
   4636       __ Push(Smi::FromInt(JSArray::kSize));
   4637       __ CallRuntime(Runtime::kAllocateInNewSpace);
   4638     }
   4639     __ jmp(&done_allocate);
   4640   }
   4641 
   4642   __ bind(&rest_parameters);
   4643   {
    4644     // Compute the pointer to the first rest parameter (skipping the receiver).
   4645     __ lea(ebx,
   4646            Operand(ebx, eax, times_half_pointer_size,
   4647                    StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
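             // eax holds a Smi here (value << 1 on ia32), so scaling it by
             // times_half_pointer_size (i.e. times_2) yields value * kPointerSize --
             // the usual trick for indexing with a tagged integer.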
   4648 
   4649     // ----------- S t a t e -------------
   4650     //  -- esi    : context
   4651     //  -- eax    : number of rest parameters (tagged)
   4652     //  -- ebx    : pointer to first rest parameters
   4653     //  -- esp[0] : return address
   4654     // -----------------------------------
   4655 
   4656     // Allocate space for the rest parameter array plus the backing store.
   4657     Label allocate, done_allocate;
   4658     __ lea(ecx, Operand(eax, times_half_pointer_size,
   4659                         JSArray::kSize + FixedArray::kHeaderSize));
   4660     __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
   4661     __ bind(&done_allocate);
   4662 
   4663     // Setup the elements array in edx.
   4664     __ mov(FieldOperand(edx, FixedArray::kMapOffset),
   4665            isolate()->factory()->fixed_array_map());
   4666     __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
   4667     {
   4668       Label loop, done_loop;
   4669       __ Move(ecx, Smi::FromInt(0));
   4670       __ bind(&loop);
   4671       __ cmp(ecx, eax);
   4672       __ j(equal, &done_loop, Label::kNear);
   4673       __ mov(edi, Operand(ebx, 0 * kPointerSize));
   4674       __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
   4675                           FixedArray::kHeaderSize),
   4676              edi);
   4677       __ sub(ebx, Immediate(1 * kPointerSize));
   4678       __ add(ecx, Immediate(Smi::FromInt(1)));
   4679       __ jmp(&loop);
   4680       __ bind(&done_loop);
   4681     }
   4682 
   4683     // Setup the rest parameter array in edi.
   4684     __ lea(edi,
   4685            Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
   4686     __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx);
   4687     __ mov(FieldOperand(edi, JSArray::kMapOffset), ecx);
   4688     __ mov(FieldOperand(edi, JSArray::kPropertiesOffset),
   4689            isolate()->factory()->empty_fixed_array());
   4690     __ mov(FieldOperand(edi, JSArray::kElementsOffset), edx);
   4691     __ mov(FieldOperand(edi, JSArray::kLengthOffset), eax);
   4692     STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
   4693     __ mov(eax, edi);
   4694     __ Ret();
   4695 
   4696     // Fall back to %AllocateInNewSpace (if not too big).
   4697     Label too_big_for_new_space;
   4698     __ bind(&allocate);
   4699     __ cmp(ecx, Immediate(Page::kMaxRegularHeapObjectSize));
   4700     __ j(greater, &too_big_for_new_space);
   4701     {
   4702       FrameScope scope(masm, StackFrame::INTERNAL);
   4703       __ SmiTag(ecx);
   4704       __ Push(eax);
   4705       __ Push(ebx);
   4706       __ Push(ecx);
   4707       __ CallRuntime(Runtime::kAllocateInNewSpace);
   4708       __ mov(edx, eax);
   4709       __ Pop(ebx);
   4710       __ Pop(eax);
   4711     }
   4712     __ jmp(&done_allocate);
   4713 
   4714     // Fall back to %NewRestParameter.
   4715     __ bind(&too_big_for_new_space);
   4716     __ PopReturnAddressTo(ecx);
   4717     // We reload the function from the caller frame due to register pressure
   4718     // within this stub. This is the slow path, hence reloading is preferable.
   4719     if (skip_stub_frame()) {
   4720       // For Ignition we need to skip the handler/stub frame to reach the
   4721       // JavaScript frame for the function.
   4722       __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
   4723       __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
   4724     } else {
   4725       __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
   4726     }
   4727     __ PushReturnAddressFrom(ecx);
   4728     __ TailCallRuntime(Runtime::kNewRestParameter);
   4729   }
   4730 }
   4731 
   4732 
   4733 void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
   4734   // ----------- S t a t e -------------
   4735   //  -- edi    : function
   4736   //  -- esi    : context
   4737   //  -- ebp    : frame pointer
   4738   //  -- esp[0] : return address
   4739   // -----------------------------------
   4740   __ AssertFunction(edi);
   4741 
   4742   // Make ecx point to the JavaScript frame.
   4743   __ mov(ecx, ebp);
   4744   if (skip_stub_frame()) {
   4745     // For Ignition we need to skip the handler/stub frame to reach the
   4746     // JavaScript frame for the function.
   4747     __ mov(ecx, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
   4748   }
   4749   if (FLAG_debug_code) {
   4750     Label ok;
   4751     __ cmp(edi, Operand(ecx, StandardFrameConstants::kFunctionOffset));
   4752     __ j(equal, &ok);
   4753     __ Abort(kInvalidFrameForFastNewSloppyArgumentsStub);
   4754     __ bind(&ok);
   4755   }
   4756 
   4757   // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
   4758   __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
   4759   __ mov(ebx,
   4760          FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
   4761   __ lea(edx, Operand(ecx, ebx, times_half_pointer_size,
   4762                       StandardFrameConstants::kCallerSPOffset));
   4763 
   4764   // ebx : number of parameters (tagged)
   4765   // edx : parameters pointer
   4766   // edi : function
   4767   // ecx : JavaScript frame pointer.
   4768   // esp[0] : return address
   4769 
   4770   // Check if the calling frame is an arguments adaptor frame.
   4771   Label adaptor_frame, try_allocate, runtime;
   4772   __ mov(eax, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
   4773   __ mov(eax, Operand(eax, CommonFrameConstants::kContextOrFrameTypeOffset));
   4774   __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   4775   __ j(equal, &adaptor_frame, Label::kNear);
   4776 
   4777   // No adaptor, parameter count = argument count.
   4778   __ mov(ecx, ebx);
   4779   __ push(ebx);
   4780   __ jmp(&try_allocate, Label::kNear);
   4781 
   4782   // We have an adaptor frame. Patch the parameters pointer.
   4783   __ bind(&adaptor_frame);
   4784   __ push(ebx);
   4785   __ mov(edx, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
   4786   __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
   4787   __ lea(edx, Operand(edx, ecx, times_2,
   4788                       StandardFrameConstants::kCallerSPOffset));
   4789 
   4790   // ebx = parameter count (tagged)
   4791   // ecx = argument count (smi-tagged)
   4792   // Compute the mapped parameter count = min(ebx, ecx) in ebx.
   4793   __ cmp(ebx, ecx);
   4794   __ j(less_equal, &try_allocate, Label::kNear);
   4795   __ mov(ebx, ecx);
   4796 
   4797   // Save mapped parameter count and function.
   4798   __ bind(&try_allocate);
   4799   __ push(edi);
   4800   __ push(ebx);
   4801 
   4802   // Compute the sizes of backing store, parameter map, and arguments object.
   4803   // 1. Parameter map, has 2 extra words containing context and backing store.
   4804   const int kParameterMapHeaderSize =
   4805       FixedArray::kHeaderSize + 2 * kPointerSize;
   4806   Label no_parameter_map;
   4807   __ test(ebx, ebx);
   4808   __ j(zero, &no_parameter_map, Label::kNear);
   4809   __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
   4810   __ bind(&no_parameter_map);
   4811 
   4812   // 2. Backing store.
   4813   __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
   4814 
   4815   // 3. Arguments object.
   4816   __ add(ebx, Immediate(JSSloppyArgumentsObject::kSize));
   4817 
   4818   // Do the allocation of all three objects in one go.
   4819   __ Allocate(ebx, eax, edi, no_reg, &runtime, NO_ALLOCATION_FLAGS);
   4820 
   4821   // eax = address of new object(s) (tagged)
   4822   // ecx = argument count (smi-tagged)
   4823   // esp[0] = mapped parameter count (tagged)
   4824   // esp[4] = function
   4825   // esp[8] = parameter count (tagged)
   4826   // Get the arguments map from the current native context into edi.
   4827   Label has_mapped_parameters, instantiate;
   4828   __ mov(edi, NativeContextOperand());
   4829   __ mov(ebx, Operand(esp, 0 * kPointerSize));
   4830   __ test(ebx, ebx);
   4831   __ j(not_zero, &has_mapped_parameters, Label::kNear);
   4832   __ mov(
   4833       edi,
   4834       Operand(edi, Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX)));
   4835   __ jmp(&instantiate, Label::kNear);
   4836 
   4837   __ bind(&has_mapped_parameters);
   4838   __ mov(edi, Operand(edi, Context::SlotOffset(
   4839                                Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX)));
   4840   __ bind(&instantiate);
   4841 
   4842   // eax = address of new object (tagged)
   4843   // ebx = mapped parameter count (tagged)
   4844   // ecx = argument count (smi-tagged)
   4845   // edi = address of arguments map (tagged)
   4846   // esp[0] = mapped parameter count (tagged)
   4847   // esp[4] = function
   4848   // esp[8] = parameter count (tagged)
   4849   // Copy the JS object part.
   4850   __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
   4851   __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
   4852          masm->isolate()->factory()->empty_fixed_array());
   4853   __ mov(FieldOperand(eax, JSObject::kElementsOffset),
   4854          masm->isolate()->factory()->empty_fixed_array());
   4855 
   4856   // Set up the callee in-object property.
   4857   STATIC_ASSERT(JSSloppyArgumentsObject::kCalleeIndex == 1);
   4858   __ mov(edi, Operand(esp, 1 * kPointerSize));
   4859   __ AssertNotSmi(edi);
   4860   __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kCalleeOffset), edi);
   4861 
   4862   // Use the length (smi tagged) and set that as an in-object property too.
   4863   __ AssertSmi(ecx);
   4864   __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kLengthOffset), ecx);
   4865 
   4866   // Set up the elements pointer in the allocated arguments object.
   4867   // If we allocated a parameter map, edi will point there, otherwise to the
   4868   // backing store.
   4869   __ lea(edi, Operand(eax, JSSloppyArgumentsObject::kSize));
   4870   __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
   4871 
   4872   // eax = address of new object (tagged)
   4873   // ebx = mapped parameter count (tagged)
   4874   // ecx = argument count (tagged)
   4875   // edx = address of receiver argument
   4876   // edi = address of parameter map or backing store (tagged)
   4877   // esp[0] = mapped parameter count (tagged)
   4878   // esp[4] = function
   4879   // esp[8] = parameter count (tagged)
   4880   // Free two registers.
   4881   __ push(edx);
   4882   __ push(eax);
   4883 
   4884   // Initialize parameter map. If there are no mapped arguments, we're done.
   4885   Label skip_parameter_map;
   4886   __ test(ebx, ebx);
   4887   __ j(zero, &skip_parameter_map);
   4888 
   4889   __ mov(FieldOperand(edi, FixedArray::kMapOffset),
   4890          Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
   4891   __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
   4892   __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
   4893   __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
   4894   __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
   4895   __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);
   4896 
   4897   // Copy the parameter slots and the holes in the arguments.
   4898   // We need to fill in mapped_parameter_count slots. They index the context,
   4899   // where parameters are stored in reverse order, at
   4900   //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
    4901   // The mapped parameters thus need to get indices
   4902   //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
   4903   //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
   4904   // We loop from right to left.
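           // Resulting layout of the parameter map (a FixedArray), as the sloppy
           // arguments machinery expects it:
           //
           //   map[0]     : context
           //   map[1]     : arguments backing store
           //   map[2 + i] : Smi context index for a mapped parameter, or the hole
           //
           // Unmapped arguments live in the plain backing store that follows.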
   4905   Label parameters_loop, parameters_test;
   4906   __ push(ecx);
   4907   __ mov(eax, Operand(esp, 3 * kPointerSize));
   4908   __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
   4909   __ add(ebx, Operand(esp, 5 * kPointerSize));
   4910   __ sub(ebx, eax);
   4911   __ mov(ecx, isolate()->factory()->the_hole_value());
   4912   __ mov(edx, edi);
   4913   __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
   4914   // eax = loop variable (tagged)
   4915   // ebx = mapping index (tagged)
   4916   // ecx = the hole value
   4917   // edx = address of parameter map (tagged)
   4918   // edi = address of backing store (tagged)
   4919   // esp[0] = argument count (tagged)
   4920   // esp[4] = address of new object (tagged)
   4921   // esp[8] = address of receiver argument
   4922   // esp[12] = mapped parameter count (tagged)
   4923   // esp[16] = function
   4924   // esp[20] = parameter count (tagged)
   4925   __ jmp(&parameters_test, Label::kNear);
   4926 
   4927   __ bind(&parameters_loop);
   4928   __ sub(eax, Immediate(Smi::FromInt(1)));
   4929   __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
   4930   __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
   4931   __ add(ebx, Immediate(Smi::FromInt(1)));
   4932   __ bind(&parameters_test);
   4933   __ test(eax, eax);
   4934   __ j(not_zero, &parameters_loop, Label::kNear);
   4935   __ pop(ecx);
   4936 
   4937   __ bind(&skip_parameter_map);
   4938 
   4939   // ecx = argument count (tagged)
   4940   // edi = address of backing store (tagged)
   4941   // esp[0] = address of new object (tagged)
   4942   // esp[4] = address of receiver argument
   4943   // esp[8] = mapped parameter count (tagged)
   4944   // esp[12] = function
   4945   // esp[16] = parameter count (tagged)
   4946   // Copy arguments header and remaining slots (if there are any).
   4947   __ mov(FieldOperand(edi, FixedArray::kMapOffset),
   4948          Immediate(isolate()->factory()->fixed_array_map()));
   4949   __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
   4950 
   4951   Label arguments_loop, arguments_test;
   4952   __ mov(ebx, Operand(esp, 2 * kPointerSize));
   4953   __ mov(edx, Operand(esp, 1 * kPointerSize));
   4954   __ sub(edx, ebx);  // Is there a smarter way to do negative scaling?
   4955   __ sub(edx, ebx);
   4956   __ jmp(&arguments_test, Label::kNear);
   4957 
   4958   __ bind(&arguments_loop);
   4959   __ sub(edx, Immediate(kPointerSize));
   4960   __ mov(eax, Operand(edx, 0));
   4961   __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
   4962   __ add(ebx, Immediate(Smi::FromInt(1)));
   4963 
   4964   __ bind(&arguments_test);
   4965   __ cmp(ebx, ecx);
   4966   __ j(less, &arguments_loop, Label::kNear);
   4967 
   4968   // Restore.
   4969   __ pop(eax);  // Address of arguments object.
   4970   __ Drop(4);
   4971 
   4972   // Return.
   4973   __ ret(0);
   4974 
   4975   // Do the runtime call to allocate the arguments object.
   4976   __ bind(&runtime);
   4977   __ pop(eax);   // Remove saved mapped parameter count.
   4978   __ pop(edi);   // Pop saved function.
   4979   __ pop(eax);   // Remove saved parameter count.
   4980   __ pop(eax);   // Pop return address.
   4981   __ push(edi);  // Push function.
   4982   __ push(edx);  // Push parameters pointer.
   4983   __ push(ecx);  // Push parameter count.
   4984   __ push(eax);  // Push return address.
   4985   __ TailCallRuntime(Runtime::kNewSloppyArguments);
   4986 }
   4987 
   4988 
   4989 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
   4990   // ----------- S t a t e -------------
   4991   //  -- edi    : function
   4992   //  -- esi    : context
   4993   //  -- ebp    : frame pointer
   4994   //  -- esp[0] : return address
   4995   // -----------------------------------
   4996   __ AssertFunction(edi);
   4997 
   4998   // Make edx point to the JavaScript frame.
   4999   __ mov(edx, ebp);
   5000   if (skip_stub_frame()) {
   5001     // For Ignition we need to skip the handler/stub frame to reach the
   5002     // JavaScript frame for the function.
   5003     __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
   5004   }
   5005   if (FLAG_debug_code) {
   5006     Label ok;
   5007     __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset));
   5008     __ j(equal, &ok);
   5009     __ Abort(kInvalidFrameForFastNewStrictArgumentsStub);
   5010     __ bind(&ok);
   5011   }
   5012 
   5013   // Check if we have an arguments adaptor frame below the function frame.
   5014   Label arguments_adaptor, arguments_done;
   5015   __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
   5016   __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
   5017          Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   5018   __ j(equal, &arguments_adaptor, Label::kNear);
   5019   {
   5020     __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
   5021     __ mov(eax,
   5022            FieldOperand(eax, SharedFunctionInfo::kFormalParameterCountOffset));
   5023     __ lea(ebx,
   5024            Operand(edx, eax, times_half_pointer_size,
   5025                    StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
   5026   }
   5027   __ jmp(&arguments_done, Label::kNear);
   5028   __ bind(&arguments_adaptor);
   5029   {
   5030     __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
   5031     __ lea(ebx,
   5032            Operand(ebx, eax, times_half_pointer_size,
   5033                    StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
   5034   }
   5035   __ bind(&arguments_done);
   5036 
   5037   // ----------- S t a t e -------------
   5038   //  -- eax    : number of arguments (tagged)
   5039   //  -- ebx    : pointer to the first argument
   5040   //  -- esi    : context
   5041   //  -- esp[0] : return address
   5042   // -----------------------------------
   5043 
   5044   // Allocate space for the strict arguments object plus the backing store.
   5045   Label allocate, done_allocate;
   5046   __ lea(ecx,
   5047          Operand(eax, times_half_pointer_size,
   5048                  JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
   5049   __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
   5050   __ bind(&done_allocate);
   5051 
   5052   // Setup the elements array in edx.
   5053   __ mov(FieldOperand(edx, FixedArray::kMapOffset),
   5054          isolate()->factory()->fixed_array_map());
   5055   __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
   5056   {
   5057     Label loop, done_loop;
   5058     __ Move(ecx, Smi::FromInt(0));
   5059     __ bind(&loop);
   5060     __ cmp(ecx, eax);
   5061     __ j(equal, &done_loop, Label::kNear);
   5062     __ mov(edi, Operand(ebx, 0 * kPointerSize));
   5063     __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
   5064                         FixedArray::kHeaderSize),
   5065            edi);
   5066     __ sub(ebx, Immediate(1 * kPointerSize));
   5067     __ add(ecx, Immediate(Smi::FromInt(1)));
   5068     __ jmp(&loop);
   5069     __ bind(&done_loop);
   5070   }
   5071 
    5072   // Setup the strict arguments object in edi.
   5073   __ lea(edi,
   5074          Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
   5075   __ LoadGlobalFunction(Context::STRICT_ARGUMENTS_MAP_INDEX, ecx);
   5076   __ mov(FieldOperand(edi, JSStrictArgumentsObject::kMapOffset), ecx);
   5077   __ mov(FieldOperand(edi, JSStrictArgumentsObject::kPropertiesOffset),
   5078          isolate()->factory()->empty_fixed_array());
   5079   __ mov(FieldOperand(edi, JSStrictArgumentsObject::kElementsOffset), edx);
   5080   __ mov(FieldOperand(edi, JSStrictArgumentsObject::kLengthOffset), eax);
   5081   STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
   5082   __ mov(eax, edi);
   5083   __ Ret();
   5084 
   5085   // Fall back to %AllocateInNewSpace (if not too big).
   5086   Label too_big_for_new_space;
   5087   __ bind(&allocate);
   5088   __ cmp(ecx, Immediate(Page::kMaxRegularHeapObjectSize));
   5089   __ j(greater, &too_big_for_new_space);
   5090   {
   5091     FrameScope scope(masm, StackFrame::INTERNAL);
   5092     __ SmiTag(ecx);
   5093     __ Push(eax);
   5094     __ Push(ebx);
   5095     __ Push(ecx);
   5096     __ CallRuntime(Runtime::kAllocateInNewSpace);
   5097     __ mov(edx, eax);
   5098     __ Pop(ebx);
   5099     __ Pop(eax);
   5100   }
   5101   __ jmp(&done_allocate);
   5102 
   5103   // Fall back to %NewStrictArguments.
   5104   __ bind(&too_big_for_new_space);
   5105   __ PopReturnAddressTo(ecx);
   5106   // We reload the function from the caller frame due to register pressure
   5107   // within this stub. This is the slow path, hence reloading is preferable.
   5108   if (skip_stub_frame()) {
   5109     // For Ignition we need to skip the handler/stub frame to reach the
   5110     // JavaScript frame for the function.
   5111     __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
   5112     __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
   5113   } else {
   5114     __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
   5115   }
   5116   __ PushReturnAddressFrom(ecx);
   5117   __ TailCallRuntime(Runtime::kNewStrictArguments);
   5118 }
   5119 
   5120 
   5121 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
   5122   Register context_reg = esi;
   5123   Register slot_reg = ebx;
   5124   Register value_reg = eax;
   5125   Register cell_reg = edi;
   5126   Register cell_details_reg = edx;
   5127   Register cell_value_reg = ecx;
   5128   Label fast_heapobject_case, fast_smi_case, slow_case;
   5129 
   5130   if (FLAG_debug_code) {
   5131     __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
   5132     __ Check(not_equal, kUnexpectedValue);
   5133   }
   5134 
   5135   // Go up context chain to the script context.
   5136   for (int i = 0; i < depth(); ++i) {
   5137     __ mov(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
   5138     context_reg = cell_reg;
   5139   }
   5140 
   5141   // Load the PropertyCell at the specified slot.
   5142   __ mov(cell_reg, ContextOperand(context_reg, slot_reg));
   5143 
   5144   // Load PropertyDetails for the cell (actually only the cell_type and kind).
   5145   __ mov(cell_details_reg,
   5146          FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
   5147   __ SmiUntag(cell_details_reg);
   5148   __ and_(cell_details_reg,
   5149           Immediate(PropertyDetails::PropertyCellTypeField::kMask |
   5150                     PropertyDetails::KindField::kMask |
   5151                     PropertyDetails::kAttributesReadOnlyMask));

  // Check if PropertyCell holds mutable data.
  Label not_mutable_data;
  __ cmp(cell_details_reg,
         Immediate(PropertyDetails::PropertyCellTypeField::encode(
                       PropertyCellType::kMutable) |
                   PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &not_mutable_data);
  __ JumpIfSmi(value_reg, &fast_smi_case);
  __ bind(&fast_heapobject_case);
  __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
                      cell_details_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // RecordWriteField clobbers the value register, so we need to reload it.
  __ mov(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ Ret();
  __ bind(&not_mutable_data);

  // Check if the PropertyCell value matches the new value (relevant for
  // Constant, ConstantType and Undefined cells).
  Label not_same_value;
  __ mov(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ cmp(cell_value_reg, value_reg);
  __ j(not_equal, &not_same_value,
       FLAG_debug_code ? Label::kFar : Label::kNear);
  // Make sure the PropertyCell is not marked READ_ONLY.
  __ test(cell_details_reg,
          Immediate(PropertyDetails::kAttributesReadOnlyMask));
  __ j(not_zero, &slow_case);
  if (FLAG_debug_code) {
    Label done;
    // This can only be true for Constant, ConstantType and Undefined cells,
    // because we never store the_hole via this stub.
    __ cmp(cell_details_reg,
           Immediate(PropertyDetails::PropertyCellTypeField::encode(
                         PropertyCellType::kConstant) |
                     PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmp(cell_details_reg,
           Immediate(PropertyDetails::PropertyCellTypeField::encode(
                         PropertyCellType::kConstantType) |
                     PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmp(cell_details_reg,
           Immediate(PropertyDetails::PropertyCellTypeField::encode(
                         PropertyCellType::kUndefined) |
                     PropertyDetails::KindField::encode(kData)));
    __ Check(equal, kUnexpectedValue);
    __ bind(&done);
  }
  __ Ret();
  __ bind(&not_same_value);

  // Check if the PropertyCell contains data with constant type (and is not
  // READ_ONLY).
  __ cmp(cell_details_reg,
         Immediate(PropertyDetails::PropertyCellTypeField::encode(
                       PropertyCellType::kConstantType) |
                   PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &slow_case, Label::kNear);

  // Now either both the old and the new value must be Smis, or both must be
  // heap objects with the same map.
  Label value_is_heap_object;
  __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
  __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
  // Old and new values are Smis, no need for a write barrier here.
  __ bind(&fast_smi_case);
  __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ Ret();
  __ bind(&value_is_heap_object);
  __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
  Register cell_value_map_reg = cell_value_reg;
  __ mov(cell_value_map_reg,
         FieldOperand(cell_value_reg, HeapObject::kMapOffset));
  __ cmp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
  __ j(equal, &fast_heapobject_case);

  // Fall back to the runtime.
  __ bind(&slow_case);
  __ SmiTag(slot_reg);
  __ Pop(cell_reg);  // Pop return address.
  __ Push(slot_reg);
  __ Push(value_reg);
  __ Push(cell_reg);  // Push return address.
  __ TailCallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreGlobalViaContext_Strict
                         : Runtime::kStoreGlobalViaContext_Sloppy);
}
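
// Decision tree implemented by StoreGlobalViaContextStub::Generate above
// (a sketch, in evaluation order):
//
//   details == (kMutable, kData)           -> store the value, with a write
//                                             barrier for heap objects
//   cell value == new value && !READ_ONLY  -> nothing to store, just return
//   details == (kConstantType, kData) &&
//     (both Smis || same map)              -> store via the fast cases
//   anything else                          -> runtime call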


// Generates an Operand for saving parameters after PrepareCallApiFunction.
static Operand ApiParameterOperand(int index) {
  return Operand(esp, index * kPointerSize);
}
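// For example, ApiParameterOperand(0) is [esp] and ApiParameterOperand(2) is
// [esp + 8], i.e. the outgoing-argument slots reserved by EnterApiExitFrame.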


// Prepares the stack to pass arguments (aligns and so on). Reserves space
// for the return value if needed (assumes the return value is a handle).
// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
// etc. Saves context (esi). If space was reserved for the return value, the
// pointer to the reserved slot is stored in esi.
static void PrepareCallApiFunction(MacroAssembler* masm, int argc) {
  __ EnterApiExitFrame(argc);
  if (__ emit_debug_code()) {
    __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
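// Typical use, mirroring the callers below (a sketch): reserve the slots,
// fill them via ApiParameterOperand, then call the C function. Note that
// under --debug-code esi is deliberately zapped above, so any stale use of
// the context after entering the exit frame fails fast.
//
//   PrepareCallApiFunction(masm, kApiArgc);
//   __ mov(ApiParameterOperand(0), scratch);  // first C argument
//   ...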


// Calls an API function. Allocates a HandleScope, extracts the returned
// value from the handle and propagates exceptions. Clobbers ebx, edi and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     Operand thunk_last_arg, int stack_space,
                                     Operand* stack_space_operand,
                                     Operand return_value_operand,
                                     Operand* context_restore_operand) {
  Isolate* isolate = masm->isolate();

  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address(isolate);
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address(isolate);

  DCHECK(edx.is(function_address));
  // Allocate HandleScope in callee-save registers.
  __ mov(ebx, Operand::StaticVariable(next_address));
  __ mov(edi, Operand::StaticVariable(limit_address));
  __ add(Operand::StaticVariable(level_address), Immediate(1));

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label profiler_disabled;
  Label end_profiler_check;
  __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
  __ cmpb(Operand(eax, 0), Immediate(0));
  __ j(zero, &profiler_disabled);

  // The additional parameter is the address of the actual callback function.
  __ mov(thunk_last_arg, function_address);
  // Call the api function via the thunk.
  __ mov(eax, Immediate(thunk_ref));
  __ call(eax);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  // Call the api function directly.
  __ call(function_address);
  __ bind(&end_profiler_check);

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label prologue;
  // Load the value from ReturnValue.
  __ mov(eax, return_value_operand);

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  __ bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // the previous handle scope.
  __ mov(Operand::StaticVariable(next_address), ebx);
  __ sub(Operand::StaticVariable(level_address), Immediate(1));
  __ Assert(above_equal, kInvalidHandleScopeLevel);  // Flags from the sub.
  __ cmp(edi, Operand::StaticVariable(limit_address));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  bool restore_context = context_restore_operand != nullptr;
  if (restore_context) {
    __ mov(esi, *context_restore_operand);
  }
  if (stack_space_operand != nullptr) {
    __ mov(ebx, *stack_space_operand);
  }
  __ LeaveApiExitFrame(!restore_context);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);
  __ cmp(Operand::StaticVariable(scheduled_exception_address),
         Immediate(isolate->factory()->the_hole_value()));
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ cmp(map, isolate->factory()->heap_number_map());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->undefined_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->true_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->false_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->null_value());
  __ j(equal, &ok, Label::kNear);

  __ Abort(kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

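  // Two ways to drop the arguments below: when the consumed stack space is
  // only known at run time, it was loaded into ebx from *stack_space_operand
  // above, so pop the return address, adjust esp by hand and jump back;
  // otherwise a plain ret with an immediate suffices.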
  if (stack_space_operand != nullptr) {
    DCHECK_EQ(0, stack_space);
    __ pop(ecx);
    __ add(esp, ebx);
    __ jmp(ecx);
  } else {
    __ ret(stack_space * kPointerSize);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate);
  __ bind(&delete_allocated_handles);
  __ mov(Operand::StaticVariable(limit_address), edi);
  __ mov(edi, eax);  // Save the return value across the call below.
  __ mov(Operand(esp, 0),
         Immediate(ExternalReference::isolate_address(isolate)));
  __ mov(eax, Immediate(delete_extensions));
  __ call(eax);
  __ mov(eax, edi);  // Restore the return value.
  __ jmp(&leave_exit_frame);
}
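
// In C++ terms, the HandleScope bookkeeping in CallApiFunctionAndReturn is
// roughly the following (a sketch of the protocol, not code from this file;
// HandleScopeData lives in the isolate):
//
//   HandleScopeData* data = isolate->handle_scope_data();
//   Object** prev_next = data->next;    // saved in ebx
//   Object** prev_limit = data->limit;  // saved in edi
//   data->level++;
//   result = callback(...);             // direct, or via the profiler thunk
//   data->next = prev_next;
//   data->level--;
//   if (data->limit != prev_limit)      // the callback grew the scope
//     HandleScope::DeleteExtensions(isolate);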
   5426 
   5427 void CallApiCallbackStub::Generate(MacroAssembler* masm) {
   5428   // ----------- S t a t e -------------
   5429   //  -- edi                 : callee
   5430   //  -- ebx                 : call_data
   5431   //  -- ecx                 : holder
   5432   //  -- edx                 : api_function_address
   5433   //  -- esi                 : context
   5434   //  --
   5435   //  -- esp[0]              : return address
   5436   //  -- esp[4]              : last argument
   5437   //  -- ...
   5438   //  -- esp[argc * 4]       : first argument
   5439   //  -- esp[(argc + 1) * 4] : receiver
   5440   // -----------------------------------
   5441 
   5442   Register callee = edi;
   5443   Register call_data = ebx;
   5444   Register holder = ecx;
   5445   Register api_function_address = edx;
   5446   Register context = esi;
   5447   Register return_address = eax;
   5448 
   5449   typedef FunctionCallbackArguments FCA;
   5450 
   5451   STATIC_ASSERT(FCA::kContextSaveIndex == 6);
   5452   STATIC_ASSERT(FCA::kCalleeIndex == 5);
   5453   STATIC_ASSERT(FCA::kDataIndex == 4);
   5454   STATIC_ASSERT(FCA::kReturnValueOffset == 3);
   5455   STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
   5456   STATIC_ASSERT(FCA::kIsolateIndex == 1);
   5457   STATIC_ASSERT(FCA::kHolderIndex == 0);
   5458   STATIC_ASSERT(FCA::kNewTargetIndex == 7);
   5459   STATIC_ASSERT(FCA::kArgsLength == 8);
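
  // The pushes below build the FunctionCallbackArguments array top-down, so
  // the resulting stack matches the indices asserted above (a sketch):
  //
  //   new target            (index 7, pushed first, highest address)
  //   context save          (index 6)
  //   callee                (index 5)
  //   call data             (index 4)
  //   return value          (index 3)
  //   return value default  (index 2)
  //   isolate               (index 1)
  //   holder                (index 0, ends up at the lowest address)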

  __ pop(return_address);

  // new target
  __ PushRoot(Heap::kUndefinedValueRootIndex);

  // context save
  __ push(context);

  // callee
  __ push(callee);

  // call data
  __ push(call_data);

  Register scratch = call_data;
  if (!call_data_undefined()) {
    // return value
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
    // return value default
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
  } else {
    // call_data is already undefined, so reuse it for both slots.
    // return value
    __ push(scratch);
    // return value default
    __ push(scratch);
  }
  // isolate
  __ push(Immediate(reinterpret_cast<int>(masm->isolate())));
  // holder
  __ push(holder);

  __ mov(scratch, esp);

  // push return address
  __ push(return_address);

  if (!is_lazy()) {
    // load context from callee
    __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));
  }

  // The API function gets a reference to the v8::Arguments. If the CPU
  // profiler is enabled, a wrapper function is called instead and the
  // address of the callback must be passed as an additional parameter, so
  // always allocate space for it.
  const int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space, since
  // it is not controlled by the GC.
  const int kApiStackSpace = 3;

  PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ mov(ApiParameterOperand(2), scratch);
  __ add(scratch, Immediate((argc() + FCA::kArgsLength - 1) * kPointerSize));
  // FunctionCallbackInfo::values_.
  __ mov(ApiParameterOperand(3), scratch);
  // FunctionCallbackInfo::length_.
  __ Move(ApiParameterOperand(4), Immediate(argc()));
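
  // The three stores above fill in the FunctionCallbackInfo the callback
  // receives; its layout is roughly (a sketch, see include/v8.h for the
  // real definition):
  //
  //   implicit_args_ -> the FCA array built above
  //   values_        -> the first JS argument (the highest-addressed one)
  //   length_        =  argc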

  // v8::InvocationCallback's argument.
  __ lea(scratch, ApiParameterOperand(2));
  __ mov(ApiParameterOperand(0), scratch);

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  Operand context_restore_operand(ebp,
                                  (2 + FCA::kContextSaveIndex) * kPointerSize);
  // Store callbacks return their first JS argument (the value being stored)
  // rather than the ReturnValue slot.
  int return_value_offset = 0;
  if (is_store()) {
    return_value_offset = 2 + FCA::kArgsLength;
  } else {
    return_value_offset = 2 + FCA::kReturnValueOffset;
  }
  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
  // The amount of stack to drop is known statically here, so pass it as an
  // immediate rather than through an operand.
  const int stack_space = argc() + FCA::kArgsLength + 1;
  Operand* stack_space_operand = nullptr;
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(1), stack_space,
                           stack_space_operand, return_value_operand,
                           &context_restore_operand);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // Build the v8::PropertyCallbackInfo::args_ array on the stack and push the
  // property name below the exit frame to make the GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
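
  // As in CallApiCallbackStub, the pushes below lay out the args_ array so
  // that index 0 ends up at the lowest address (a sketch):
  //
  //   receiver / this       (index 6, pushed first)
  //   data                  (index 5)
  //   return value          (index 4)
  //   return value default  (index 3)
  //   isolate               (index 2)
  //   holder                (index 1)
  //   should_throw_on_error (index 0)
  //
  // followed by the property name handle and the saved return address.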

  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = ebx;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  __ pop(scratch);  // Pop return address to extend the frame.
  __ push(receiver);
  __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
  __ PushRoot(Heap::kUndefinedValueRootIndex);  // ReturnValue
  // ReturnValue default value
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  __ push(Immediate(ExternalReference::isolate_address(isolate())));
  __ push(holder);
  __ push(Immediate(Smi::FromInt(0)));  // should_throw_on_error -> false
  __ push(FieldOperand(callback, AccessorInfo::kNameOffset));
  __ push(scratch);  // Restore return address.

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate the v8::PropertyCallbackInfo object, arguments for the callback
  // and space for the optional callback address parameter (in case the CPU
  // profiler is active) in non-GCed stack space.
  const int kApiArgc = 3 + 1;

  // Load the address of the v8::PropertyAccessorInfo::args_ array.
  __ lea(scratch, Operand(esp, 2 * kPointerSize));

  PrepareCallApiFunction(masm, kApiArgc);
  // Create the v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  Operand info_object = ApiParameterOperand(3);
  __ mov(info_object, scratch);

  // Name as handle.
  __ sub(scratch, Immediate(kPointerSize));
  __ mov(ApiParameterOperand(0), scratch);
  // Arguments pointer.
  __ lea(scratch, info_object);
  __ mov(ApiParameterOperand(1), scratch);
  // Reserve space for the optional callback address parameter.
  Operand thunk_last_arg = ApiParameterOperand(2);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
  Register function_address = edx;
  __ mov(function_address,
         FieldOperand(scratch, Foreign::kForeignAddressOffset));
  // The +3 skips the frame prologue, the return address and the name handle.
  Operand return_value_operand(
      ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, function_address, thunk_ref, thunk_last_arg,
                           kStackUnwindSpace, nullptr, return_value_operand,
                           nullptr);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32