// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/api-arguments.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/counters.h"
#include "src/double.h"
#include "src/heap/heap-inl.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/objects-inl.h"
#include "src/objects/regexp-match-info.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

#include "src/x64/code-stubs-x64.h"  // Cannot be the first include.

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
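  // Per this stub's interface descriptor, rax holds the argument count, rdi
  // the array constructor and rbx the AllocationSite (or undefined). The
  // shuffle below stores the constructor into the receiver slot and pushes
  // the constructor, the AllocationSite and the return address back on top,
  // so that Runtime::kNewArray sees them as three extra arguments.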
  __ popq(rcx);
  __ movq(MemOperand(rsp, rax, times_8, 0), rdi);
  __ pushq(rdi);
  __ pushq(rbx);
  __ pushq(rcx);
  __ addq(rax, Immediate(3));
  __ TailCallRuntime(Runtime::kNewArray);
}

void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           rax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ Push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ Ret();
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
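  // Spill the caller-saved registers (including the FP registers when the
  // stub was created with save_doubles()), call the C++ store-buffer
  // overflow handler with the isolate as its only argument, then restore
  // the registers.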
  __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
  __ LoadAddress(arg_reg_1,
                 ExternalReference::isolate_address(isolate()));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ConvertUndefined {
    CONVERT_UNDEFINED_TO_ZERO,
    BAILOUT_ON_UNDEFINED
  };
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged.
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);
};


void DoubleToIStub::Generate(MacroAssembler* masm) {
    Register input_reg = this->source();
    Register final_result_reg = this->destination();
    DCHECK(is_truncating());
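    // The conversion is performed by reading the double directly from memory
    // at input_reg + offset(), so it also works on values that have been
    // spilled to the stack rather than living in an XMM register.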

    Label check_negative, process_64_bits, done;

    int double_offset = offset();

    // Account for return address and saved regs if input is rsp.
    if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize;

    MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
    MemOperand exponent_operand(MemOperand(input_reg,
                                           double_offset + kDoubleSize / 2));
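    // x64 is little-endian, so the double's low word (mantissa bits 0..31)
    // is at double_offset and the high word (sign, exponent and the upper
    // mantissa bits) is at double_offset + 4.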

    Register scratch1;
    Register scratch_candidates[3] = { rbx, rdx, rdi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
    // Since we must use rcx for shifts below, use some other register (rax)
    // to calculate the result if rcx is the requested return register.
    Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
    // Save rcx if it isn't the return register and therefore volatile, or if
    // it is the return register, then save the temp register we use in its
    // stead for the result.
    Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
    __ pushq(scratch1);
    __ pushq(save_reg);

    bool stash_exponent_copy = !input_reg.is(rsp);
    __ movl(scratch1, mantissa_operand);
    __ Movsd(kScratchDoubleReg, mantissa_operand);
    __ movl(rcx, exponent_operand);
    if (stash_exponent_copy) __ pushq(rcx);

    __ andl(rcx, Immediate(HeapNumber::kExponentMask));
    __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
    __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
    __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
    __ j(below, &process_64_bits);

    // Result is entirely in lower 32-bits of mantissa
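    // (The shift count computed below is exponent - bias - 52; counts above
    // 31 would shift every interesting bit out, so the zero prepared in
    // result_reg is kept in that case.)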
    int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
    __ subl(rcx, Immediate(delta));
    __ xorl(result_reg, result_reg);
    __ cmpl(rcx, Immediate(31));
    __ j(above, &done);
    __ shll_cl(scratch1);
    __ jmp(&check_negative);

    __ bind(&process_64_bits);
    __ Cvttsd2siq(result_reg, kScratchDoubleReg);
    __ jmp(&done, Label::kNear);

    // If the double was negative, negate the integer result.
    __ bind(&check_negative);
    __ movl(result_reg, scratch1);
    __ negl(result_reg);
    if (stash_exponent_copy) {
        __ cmpl(MemOperand(rsp, 0), Immediate(0));
    } else {
        __ cmpl(exponent_operand, Immediate(0));
    }
    __ cmovl(greater, result_reg, scratch1);

    // Restore registers
    __ bind(&done);
    if (stash_exponent_copy) {
        __ addp(rsp, Immediate(kDoubleSize));
    }
    if (!final_result_reg.is(result_reg)) {
        DCHECK(final_result_reg.is(rcx));
        __ movl(final_result_reg, result_reg);
    }
    __ popq(save_reg);
    __ popq(scratch1);
    __ ret(0);
}


void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ Cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ Cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(rdx));
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movp(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  if (exponent_type() == TAGGED) {
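    // A tagged exponent is either a smi, which can take the integer path
    // directly, or a heap number whose double value is loaded for the
    // integer-vs-float analysis below.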
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // Detect integer exponents stored as double.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    __ Cvttsd2si(exponent, double_exponent);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cmpl(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ subp(rsp, Immediate(kDoubleSize));
    __ Movsd(Operand(rsp, 0), double_exponent);
    __ fld_d(Operand(rsp, 0));  // E
    __ Movsd(Operand(rsp, 0), double_base);
    __ fld_d(Operand(rsp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(rsp, 0));
    __ Movsd(double_result, Operand(rsp, 0));
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movp(scratch, exponent);  // Back up exponent.
  __ Movsd(double_scratch, double_base);     // Back up base.
  __ Movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);

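  // Compute base^|exponent| by binary exponentiation: square the base once
  // per exponent bit and multiply it into the result whenever the bit that
  // was shifted out is set. Roughly, as a C sketch of the loop below:
  //   double r = 1, b = base; unsigned e = |exponent|;
  //   while (e) { if (e & 1) r *= b; b *= b; e >>= 1; }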
  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0.  This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ Movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ Mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ Mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ Divsd(double_scratch2, double_result);
  __ Movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ Xorpd(double_scratch2, double_scratch2);
  __ Ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // input was a smi.  We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  __ bind(&call_runtime);
  // Move base to the correct argument register.  Exponent is already in xmm1.
  __ Movsd(xmm0, double_base);
  DCHECK(double_exponent.is(xmm1));
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(2);
    __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
                     2);
  }
  // Return value is in xmm0.
  __ Movsd(double_result, xmm0);

  __ bind(&done);
  __ ret(0);
}

void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to runtime if native RegExp is not selected at compile
  // time, or if the regexp entry in generated code has been turned off by a
  // runtime switch or at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : last_match_info (expected JSArray)
  //  rsp[16] : previous index
  //  rsp[24] : subject string
  //  rsp[32] : JSRegExp object

  enum RegExpExecStubArgumentIndices {
    JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
    SUBJECT_STRING_ARGUMENT_INDEX,
    PREVIOUS_INDEX_ARGUMENT_INDEX,
    LAST_MATCH_INFO_ARGUMENT_INDEX,
    REG_EXP_EXEC_ARGUMENT_COUNT
  };

  StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Label runtime;
  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
  __ testp(kScratchRegister, kScratchRegister);
  __ j(zero, &runtime);
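  // (A regexp stack size of zero means the backtracking stack has not been
  // allocated for this isolate yet; the runtime path handles that case.)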

  // Check that the first argument is a JSRegExp object.
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ JumpIfSmi(rax, &runtime);
  __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    Condition is_smi = masm->CheckSmi(rax);
    __ Check(NegateCondition(is_smi),
        kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // rax: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
  __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
  __ j(not_equal, &runtime);

  // rax: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ SmiToInteger32(rdx,
                    FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or              number_of_captures <= offsets vector size / 2 - 1
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Set(r14, 0);
  __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ JumpIfSmi(rdi, &runtime);
  __ movp(r15, rdi);  // Make a copy of the original subject string.
  // rax: RegExp data (FixedArray)
  // rdi: subject string
  // r15: subject string
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte?  If yes, go to (9).
  // (2) Sequential one byte?  If yes, go to (5).
  // (3) Sequential or cons?  If not, go to (6).
  // (4) Cons string.  If the string is flat, replace subject with first string
  //     and go to (1). Otherwise bail out to runtime.
  // (5) One byte sequential.  Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (6) Long external string?  If not, go to (10).
  // (7) External string.  Make it, offset-wise, look like a sequential string.
  // (8) Is the external string one byte?  If yes, go to (5).
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string?  If yes, bail out to runtime.
  // (11) Sliced or thin string.  Replace subject with parent. Go to (1).

  Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */,
      external_string /* 7 */, check_underlying /* 1 */,
      not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */;

  __ bind(&check_underlying);
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));

  // (1) Sequential two byte?  If yes, go to (9).
  __ andb(rbx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kStringEncodingMask |
                         kShortExternalStringMask));
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte?  If yes, go to (5).
  // Any other sequential string must be one byte.
  __ andb(rbx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (5).

  // (3) Sequential or cons?  If not, go to (6).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kThinStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmpp(rbx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (6).

  // (4) Cons string.  Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  __ j(not_equal, &runtime);
  __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
  __ jmp(&check_underlying);

  // (5) One byte sequential.  Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // rax: RegExp data (FixedArray)
  __ movp(r11, FieldOperand(rax, JSRegExp::kDataOneByteCodeOffset));
  __ Set(rcx, 1);  // Type is one byte.

  // (E) Carry on.  String handling is done.
  __ bind(&check_code);
  // r11: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(r11, &runtime);

  // rdi: sequential subject string (or look-alike, external string)
  // r15: original subject string
  // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // r11: code
  // Load used arguments before starting to push arguments for call to native
  // RegExp code to avoid handling changing stack height.
  // We have to use r15 instead of rdi to load the length because rdi might
  // have been only made to look like a sequential string when it actually
  // is an external string.
  __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
  __ JumpIfNotSmi(rbx, &runtime);
  __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ SmiToInteger64(rbx, rbx);

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // r11: code
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  int argument_slots_on_stack =
      masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
  __ EnterApiExitFrame(argument_slots_on_stack);

  // Argument 9: Pass current isolate address.
  __ LoadAddress(kScratchRegister,
                 ExternalReference::isolate_address(isolate()));
  __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
          kScratchRegister);

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize),
          Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ Move(kScratchRegister, address_of_regexp_stack_memory_address);
  __ movp(r9, Operand(kScratchRegister, 0));
  __ Move(kScratchRegister, address_of_regexp_stack_memory_size);
  __ addp(r9, Operand(kScratchRegister, 0));
  __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global.  This does not affect non-global regexps.
  // Argument 6 is passed in r9 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize),
          Immediate(0));
#else
  __ Set(r9, 0);
#endif

  // Argument 5: static offsets vector buffer.
  __ LoadAddress(
      r8, ExternalReference::address_of_static_offsets_vector(isolate()));
  // Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
#endif

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // r11: code
  // r14: slice offset
  // r15: original subject string

  // Argument 2: Previous index.
  __ movp(arg_reg_2, rbx);

  // Argument 4: End of string data
  // Argument 3: Start of string data
  Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ addp(rbx, r14);
  __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset));
  __ addp(r14, arg_reg_3);  // Using arg3 as scratch.

  // rbx: start index of the input
  // r14: end index of the input
  // r15: original subject string
  __ testb(rcx, rcx);  // Last use of rcx as encoding of subject string.
  __ j(zero, &setup_two_byte, Label::kNear);
  __ leap(arg_reg_4,
          FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
  __ leap(arg_reg_3,
          FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
  __ jmp(&setup_rest, Label::kNear);
  __ bind(&setup_two_byte);
  __ leap(arg_reg_4,
          FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
  __ leap(arg_reg_3,
          FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
  __ bind(&setup_rest);

  // Argument 1: Original subject string.
  // The original subject string was saved in r15 before the subject was
  // possibly replaced by its flattened or parent string above, so pass r15
  // rather than the adjusted rdi.
  __ movp(arg_reg_1, r15);

  // Locate the code entry and call it.
  __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(r11);

  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  Label exception;
  __ cmpl(rax, Immediate(1));
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success, Label::kNear);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
  __ j(equal, &exception);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
  // If none of the above, it can only be retry.
  // Handle that in the runtime system.
  __ j(not_equal, &runtime);

  // For failure return null.
  __ LoadRoot(rax, Heap::kNullValueRootIndex);
  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
  __ SmiToInteger32(rax,
                    FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  __ leal(rdx, Operand(rax, rax, times_1, 2));

  // rdx: Number of capture registers
  // Check that the last match info is a FixedArray.
  __ movp(rbx, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
  __ JumpIfSmi(rbx, &runtime);
  // Check that the object has fast elements.
  __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset));
  __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information. Ensure no overflow in add.
  STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
  __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
  __ subl(rax, Immediate(RegExpMatchInfo::kLastMatchOverhead));
  __ cmpl(rdx, rax);
  __ j(greater, &runtime);

  // rbx: last_match_info (FixedArray)
  // rdx: number of capture registers
  // Store the capture count.
  __ Integer32ToSmi(kScratchRegister, rdx);
  __ movp(FieldOperand(rbx, RegExpMatchInfo::kNumberOfCapturesOffset),
          kScratchRegister);
  // Store last subject and last input.
  __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ movp(FieldOperand(rbx, RegExpMatchInfo::kLastSubjectOffset), rax);
  __ movp(rcx, rax);
  __ RecordWriteField(rbx, RegExpMatchInfo::kLastSubjectOffset, rax, rdi,
                      kDontSaveFPRegs);
  __ movp(rax, rcx);
  __ movp(FieldOperand(rbx, RegExpMatchInfo::kLastInputOffset), rax);
  __ RecordWriteField(rbx, RegExpMatchInfo::kLastInputOffset, rax, rdi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  __ LoadAddress(
      rcx, ExternalReference::address_of_static_offsets_vector(isolate()));

  // rbx: last_match_info (FixedArray)
  // rcx: offsets vector
  // rdx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ subp(rdx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer and make it a smi.
  __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
  __ Integer32ToSmi(rdi, rdi);
  // Store the smi value in the last match info.
  __ movp(FieldOperand(rbx, rdx, times_pointer_size,
                       RegExpMatchInfo::kFirstCaptureOffset),
          rdi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ movp(rax, rbx);
  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);

  __ bind(&exception);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code, but
  // the exception has not been created yet. Handle that in the runtime system.
  // TODO(592): Rerun the RegExp to get the stack overflow exception.
  ExternalReference pending_exception_address(
      Isolate::kPendingExceptionAddress, isolate());
  Operand pending_exception_operand =
      masm->ExternalOperand(pending_exception_address, rbx);
  __ movp(rax, pending_exception_operand);
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ cmpp(rax, rdx);
  __ j(equal, &runtime);

  // For exception, throw the exception again.
  __ TailCallRuntime(Runtime::kRegExpExecReThrow);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec);

  // Deferred code for string handling.
  // (6) Long external string?  If not, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (7) External string.  Short external strings have been ruled out.
  __ bind(&external_string);
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ testb(rbx, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8) Is the external string one byte?  If yes, go to (5).
  __ testb(rbx, Immediate(kStringEncodingMask));
  __ j(not_zero, &seq_one_byte_string);  // Go to (5).

  // rdi: subject string (flat two-byte)
  // rax: RegExp data (FixedArray)
  // (9) Two byte sequential.  Load regexp code for two byte.  Go to (E).
  __ bind(&seq_two_byte_string);
  __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
  __ Set(rcx, 0);  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string?  If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
  __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask));
  __ j(not_zero, &runtime);

  // (11) Sliced or thin string.  Replace subject with parent. Go to (1).
  Label thin_string;
  __ cmpl(rbx, Immediate(kThinStringTag));
  __ j(equal, &thin_string, Label::kNear);
  // Load offset into r14 and replace subject string with parent.
  __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
  __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
  __ jmp(&check_underlying);

  __ bind(&thin_string);
  __ movp(rdi, FieldOperand(rdi, ThinString::kActualOffset));
  __ jmp(&check_underlying);
#endif  // V8_INTERPRETED_REGEXP
}


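// Returns the comparison result that makes the comparison fail: e.g. for
// cc == greater, a NaN or undefined operand must not compare greater, so
// LESS is returned.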
static int NegativeComparisonResult(Condition cc) {
  DCHECK(cc != equal);
  DCHECK((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ CompareMap(input, masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzxbp(scratch,
             FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}


void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Label runtime_call, check_unequal_objects, done;
  Condition cc = GetCondition();
  Factory* factory = isolate()->factory();

  Label miss;
  CheckInputType(masm, rdx, left(), &miss);
  CheckInputType(masm, rax, right(), &miss);

  // Compare two smis.
  Label non_smi, smi_done;
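  // The tagged difference rdx - rax has the same sign (and zeroness) as the
  // difference of the untagged values, so it can be returned directly as the
  // comparison result; the overflow fix-up below handles the one case where
  // the subtraction wraps.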
  __ JumpIfNotBothSmi(rax, rdx, &non_smi);
  __ subp(rdx, rax);
  __ j(no_overflow, &smi_done);
  __ notp(rdx);  // Correct sign in case of overflow. rdx cannot be 0 here.
  __ bind(&smi_done);
  __ movp(rax, rdx);
  __ ret(0);
  __ bind(&non_smi);

  // The compare stub returns a positive, negative, or zero 64-bit integer
  // value in rax, corresponding to the result of comparing the two inputs.
  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Two identical objects are equal unless they are both NaN or undefined.
  {
    Label not_identical;
    __ cmpp(rax, rdx);
    __ j(not_equal, &not_identical, Label::kNear);

    if (cc != equal) {
      // Check for undefined.  undefined OP undefined is false even though
      // undefined == undefined.
      __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
      Label check_for_nan;
      __ j(not_equal, &check_for_nan, Label::kNear);
      __ Set(rax, NegativeComparisonResult(cc));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
    // so we do the second best thing - test it ourselves.
    Label heap_number;
    // If it's not a heap number, then return equal for the (in)equality
    // operator.
    __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(equal, &heap_number, Label::kNear);
    if (cc != equal) {
      __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
      __ movzxbl(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
      // Call runtime on identical objects.  Otherwise return equal.
      __ cmpb(rcx, Immediate(static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE)));
      __ j(above_equal, &runtime_call, Label::kFar);
      // Call runtime on identical symbols since we need to throw a TypeError.
      __ cmpb(rcx, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
      __ j(equal, &runtime_call, Label::kFar);
    }
    __ Set(rax, EQUAL);
    __ ret(0);

    __ bind(&heap_number);
    // It is a heap number, so return equal if it's not NaN.
    // For NaN, return 1 for every condition except greater and
    // greater-equal.  Return -1 for them, so the comparison yields
    // false for all conditions except not-equal.
    __ Set(rax, EQUAL);
    __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
    __ Ucomisd(xmm0, xmm0);
    __ setcc(parity_even, rax);
    // rax is 0 for equal non-NaN heap numbers, 1 for NaNs.
    if (cc == greater_equal || cc == greater) {
      __ negp(rax);
    }
    __ ret(0);

    __ bind(&not_identical);
  }

  if (cc == equal) {  // Both strict and non-strict.
    Label slow;  // Fallthrough label.

    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    if (strict()) {
      // If either is a Smi (we know that not both are), then they can only
      // be equal if the other is a HeapNumber. If so, use the slow case.
      {
        Label not_smis;
        __ SelectNonSmi(rbx, rax, rdx, &not_smis);

        // Check if the non-smi operand is a heap number.
        __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
               factory->heap_number_map());
        // If heap number, handle it in the slow case.
        __ j(equal, &slow);
        // Return non-equal.  ebx (the lower half of rbx) is not zero.
        __ movp(rax, rbx);
        __ ret(0);

        __ bind(&not_smis);
      }

      // If either operand is a JSObject or an oddball value, then they are not
      // equal since their pointers are different
      // There is no test for undetectability in strict equality.

      // If the first object is a JS object, we have done pointer comparison.
      STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
      Label first_non_object;
      __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
      __ j(below, &first_non_object, Label::kNear);
      // Return non-zero: rax still holds the tagged object pointer, which is
      // never zero because of the heap object tag.
      Label return_not_equal;
      STATIC_ASSERT(kHeapObjectTag != 0);
      __ bind(&return_not_equal);
      __ ret(0);

      __ bind(&first_non_object);
      // Check for oddballs: true, false, null, undefined.
      __ CmpInstanceType(rcx, ODDBALL_TYPE);
      __ j(equal, &return_not_equal);

      __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
      __ j(above_equal, &return_not_equal);

      // Check for oddballs: true, false, null, undefined.
      __ CmpInstanceType(rcx, ODDBALL_TYPE);
      __ j(equal, &return_not_equal);

      // Fall through to the general case.
    }
    __ bind(&slow);
  }

  // Generate the number comparison code.
  Label non_number_comparison;
  Label unordered;
  FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
  __ xorl(rax, rax);
  __ xorl(rcx, rcx);
  __ Ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);
  // Return a result of -1, 0, or 1, based on EFLAGS.
  __ setcc(above, rax);
  __ setcc(below, rcx);
  __ subp(rax, rcx);
  __ ret(0);

  // If one of the numbers was NaN, then the result is always false.
  // The cc is never not-equal.
  __ bind(&unordered);
  DCHECK(cc != not_equal);
  if (cc == less || cc == less_equal) {
    __ Set(rax, 1);
  } else {
    __ Set(rax, -1);
  }
  __ ret(0);

  // The number comparison code did not provide a valid result.
  __ bind(&non_number_comparison);

  // Fast negative check for internalized-to-internalized equality.
  Label check_for_strings;
  if (cc == equal) {
    BranchIfNotInternalizedString(
        masm, &check_for_strings, rax, kScratchRegister);
    BranchIfNotInternalizedString(
        masm, &check_for_strings, rdx, kScratchRegister);

    // We've already checked for object identity, so if both operands are
    // internalized strings they aren't equal. Register rax already holds a
    // non-zero value (a tagged pointer), which indicates not equal, so just
    // return.
    __ ret(0);
  }

  __ bind(&check_for_strings);

  __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx,
                                           &check_unequal_objects);

  // Inline comparison of one-byte strings.
  if (cc == equal) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, rdx, rax, rcx, rbx);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx,
                                                    rdi, r8);
  }

#ifdef DEBUG
  __ Abort(kUnexpectedFallThroughFromStringComparison);
#endif

  __ bind(&check_unequal_objects);
  if (cc == equal && !strict()) {
    // Not strict equality.  Objects are unequal if
    // they are both JSObjects and not undetectable,
    // and their pointers are different.
    Label return_equal, return_unequal, undetectable;
    // At most one is a smi, so we can test for smi by adding the two.
    // A smi plus a heap object has the low bit set, a heap object plus
    // a heap object has the low bit clear.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagMask == 1);
    __ leap(rcx, Operand(rax, rdx, times_1, 0));
    __ testb(rcx, Immediate(kSmiTagMask));
    __ j(not_zero, &runtime_call, Label::kNear);

    __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
    __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
    __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &undetectable, Label::kNear);
    __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &return_unequal, Label::kNear);

    __ CmpInstanceType(rbx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);
    __ CmpInstanceType(rcx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);

    __ bind(&return_unequal);
    // Return non-equal by returning the non-zero object pointer in rax.
    __ ret(0);

    __ bind(&undetectable);
    __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(zero, &return_unequal, Label::kNear);

    // If both sides are JSReceivers, then the result is false according to
    // the HTML specification, which says that only comparisons with null or
    // undefined are affected by special casing for document.all.
    __ CmpInstanceType(rbx, ODDBALL_TYPE);
    __ j(zero, &return_equal, Label::kNear);
    __ CmpInstanceType(rcx, ODDBALL_TYPE);
    __ j(not_zero, &return_unequal, Label::kNear);

    __ bind(&return_equal);
    __ Set(rax, EQUAL);
    __ ret(0);
  }
  __ bind(&runtime_call);

  if (cc == equal) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(rsi);
      __ Call(strict() ? isolate()->builtins()->StrictEqual()
                       : isolate()->builtins()->Equal(),
              RelocInfo::CODE_TARGET);
      __ Pop(rsi);
    }
    // Turn true into 0 and false into some non-zero value.
    STATIC_ASSERT(EQUAL == 0);
    __ LoadRoot(rdx, Heap::kTrueValueRootIndex);
    __ subp(rax, rdx);
    __ Ret();
  } else {
    // Push arguments below the return address to prepare jump to builtin.
    __ PopReturnAddressTo(rcx);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
    __ PushReturnAddressFrom(rcx);
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
  // rax : number of arguments to the construct function
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : the function to call
  FrameScope scope(masm, StackFrame::INTERNAL);

  // Number-of-arguments register must be smi-tagged to call out.
  __ Integer32ToSmi(rax, rax);
  __ Push(rax);
  __ Push(rdi);
  __ Integer32ToSmi(rdx, rdx);
  __ Push(rdx);
  __ Push(rbx);
  __ Push(rsi);

  __ CallStub(stub);

  __ Pop(rsi);
  __ Pop(rbx);
  __ Pop(rdx);
  __ Pop(rdi);
  __ Pop(rax);
  __ SmiToInteger32(rdx, rdx);
  __ SmiToInteger32(rax, rax);
}


static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot.  Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // rax : number of arguments to the construct function
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : the function to call
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function;

  // Load the cache state into r11.
  __ SmiToInteger32(rdx, rdx);
  __ movp(r11,
          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  // We don't know if r11 is a WeakCell or a Symbol, but it's harmless to read
  // at this position in a symbol (see static asserts in feedback-vector.h).
  Label check_allocation_site;
  __ cmpp(rdi, FieldOperand(r11, WeakCell::kValueOffset));
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(r11, Heap::kmegamorphic_symbolRootIndex);
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset),
                 Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &check_allocation_site);

  // If the weak cell is cleared, we have a new chance to become monomorphic.
  __ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset));
  __ j(equal, &initialize);
  __ jmp(&megamorphic);

  __ bind(&check_allocation_site);
  // If we came here, we need to see if we are the array function.
  // If we didn't have a matching function, and we didn't find the megamorphic
  // sentinel, then the slot holds either some other function or an
  // AllocationSite.
  __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &miss);

  // Make sure the function is the Array() function
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
  __ cmpp(rdi, r11);
  __ j(not_equal, &megamorphic);
  __ jmp(&done);

  __ bind(&miss);

  // A monomorphic miss (i.e., here the cache is not uninitialized) goes
  // megamorphic.
  __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex);
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
          FeedbackVector::MegamorphicSentinel(isolate));
  __ jmp(&done);

  // An uninitialized cache is patched with a WeakCell holding the function,
  // or with an AllocationSite (which tracks the ElementsKind) if the function
  // is the Array constructor.
   1188   __ bind(&initialize);
   1189 
   1190   // Make sure the function is the Array() function
   1191   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
   1192   __ cmpp(rdi, r11);
   1193   __ j(not_equal, &not_array_function);
   1194 
   1195   CreateAllocationSiteStub create_stub(isolate);
   1196   CallStubInRecordCallTarget(masm, &create_stub);
   1197   __ jmp(&done);
   1198 
   1199   __ bind(&not_array_function);
   1200   CreateWeakCellStub weak_cell_stub(isolate);
   1201   CallStubInRecordCallTarget(masm, &weak_cell_stub);
   1202 
   1203   __ bind(&done);
   1204   // Increment the call count for all function calls.
   1205   __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
   1206                                  FixedArray::kHeaderSize + kPointerSize),
   1207                     Smi::FromInt(1));
   1208 }
   1209 
   1210 
   1211 void CallConstructStub::Generate(MacroAssembler* masm) {
   1212   // rax : number of arguments
   1213   // rbx : feedback vector
   1214   // rdx : slot in feedback vector (Smi)
   1215   // rdi : constructor function
   1216 
   1217   Label non_function;
   1218   // Check that the constructor is not a smi.
   1219   __ JumpIfSmi(rdi, &non_function);
   1220   // Check that constructor is a JSFunction.
   1221   __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11);
   1222   __ j(not_equal, &non_function);
   1223 
   1224   GenerateRecordCallTarget(masm);
   1225 
   1226   Label feedback_register_initialized;
   1227   // Put the AllocationSite from the feedback vector into rbx, or undefined.
   1228   __ movp(rbx,
   1229           FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
   1230   __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
   1231   __ j(equal, &feedback_register_initialized, Label::kNear);
   1232   __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
   1233   __ bind(&feedback_register_initialized);
   1234 
   1235   __ AssertUndefinedOrAllocationSite(rbx);
   1236 
   1237   // Pass new target to construct stub.
   1238   __ movp(rdx, rdi);
   1239 
   1240   // Tail call to the function-specific construct stub (still in the caller
   1241   // context at this point).
   1242   __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
   1243   __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
   1244   __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
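  // (rcx now points just past the Code object header, i.e. at the first
  // instruction of the construct stub.)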
   1245   __ jmp(rcx);
   1246 
   1247   __ bind(&non_function);
   1248   __ movp(rdx, rdi);
   1249   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
   1250 }
   1251 
   1252 bool CEntryStub::NeedsImmovableCode() {
   1253   return false;
   1254 }
   1255 
   1256 
   1257 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   1258   CEntryStub::GenerateAheadOfTime(isolate);
   1259   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   1260   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   1261   // It is important that the store buffer overflow stubs are generated first.
   1262   CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
   1263   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
   1264   CreateWeakCellStub::GenerateAheadOfTime(isolate);
   1265   BinaryOpICStub::GenerateAheadOfTime(isolate);
   1266   BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
   1267   StoreFastElementStub::GenerateAheadOfTime(isolate);
   1268 }
   1269 
   1270 
   1271 void CodeStub::GenerateFPStubs(Isolate* isolate) {
   1272 }
   1273 
   1274 
   1275 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
   1276   CEntryStub stub(isolate, 1, kDontSaveFPRegs);
   1277   stub.GetCode();
   1278   CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
   1279   save_doubles.GetCode();
   1280 }
   1281 
   1282 
   1283 void CEntryStub::Generate(MacroAssembler* masm) {
   1284   // rax: number of arguments including receiver
   1285   // rbx: pointer to C function  (C callee-saved)
   1286   // rbp: frame pointer of calling JS frame (restored after C call)
   1287   // rsp: stack pointer  (restored after C call)
   1288   // rsi: current context (restored)
   1289   //
   1290   // If argv_in_register():
   1291   // r15: pointer to the first argument
   1292 
   1293   ProfileEntryHookStub::MaybeCallEntryHook(masm);
   1294 
   1295 #ifdef _WIN64
   1296   // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
  // stack to be aligned to 16 bytes. It only allows a single word to be
   1298   // returned in register rax. Larger return sizes must be written to an address
   1299   // passed as a hidden first argument.
   1300   const Register kCCallArg0 = rcx;
   1301   const Register kCCallArg1 = rdx;
   1302   const Register kCCallArg2 = r8;
   1303   const Register kCCallArg3 = r9;
   1304   const int kArgExtraStackSpace = 2;
   1305   const int kMaxRegisterResultSize = 1;
   1306 #else
   1307   // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
  // are returned in rax, and a struct of two pointers is returned in rax+rdx.
   1309   // Larger return sizes must be written to an address passed as a hidden first
   1310   // argument.
   1311   const Register kCCallArg0 = rdi;
   1312   const Register kCCallArg1 = rsi;
   1313   const Register kCCallArg2 = rdx;
   1314   const Register kCCallArg3 = rcx;
   1315   const int kArgExtraStackSpace = 0;
   1316   const int kMaxRegisterResultSize = 2;
   1317 #endif  // _WIN64
   1318 
   1319   // Enter the exit frame that transitions from JavaScript to C++.
   1320   int arg_stack_space =
   1321       kArgExtraStackSpace +
   1322       (result_size() <= kMaxRegisterResultSize ? 0 : result_size());
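  // When the result does not fit in registers, the callee writes it into this
  // reserved stack space and receives its address as a hidden first argument
  // (see the argument setup below).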
   1323   if (argv_in_register()) {
   1324     DCHECK(!save_doubles());
   1325     DCHECK(!is_builtin_exit());
   1326     __ EnterApiExitFrame(arg_stack_space);
   1327     // Move argc into r14 (argv is already in r15).
   1328     __ movp(r14, rax);
   1329   } else {
   1330     __ EnterExitFrame(
   1331         arg_stack_space, save_doubles(),
   1332         is_builtin_exit() ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
   1333   }
   1334 
   1335   // rbx: pointer to builtin function  (C callee-saved).
   1336   // rbp: frame pointer of exit frame  (restored after C call).
   1337   // rsp: stack pointer (restored after C call).
   1338   // r14: number of arguments including receiver (C callee-saved).
   1339   // r15: argv pointer (C callee-saved).
   1340 
   1341   // Check stack alignment.
   1342   if (FLAG_debug_code) {
   1343     __ CheckStackAlignment();
   1344   }
   1345 
   1346   // Call C function. The arguments object will be created by stubs declared by
   1347   // DECLARE_RUNTIME_FUNCTION().
   1348   if (result_size() <= kMaxRegisterResultSize) {
   1349     // Pass a pointer to the Arguments object as the first argument.
   1350     // Return result in single register (rax), or a register pair (rax, rdx).
   1351     __ movp(kCCallArg0, r14);  // argc.
   1352     __ movp(kCCallArg1, r15);  // argv.
   1353     __ Move(kCCallArg2, ExternalReference::isolate_address(isolate()));
   1354   } else {
   1355     DCHECK_LE(result_size(), 3);
   1356     // Pass a pointer to the result location as the first argument.
   1357     __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
   1358     // Pass a pointer to the Arguments object as the second argument.
   1359     __ movp(kCCallArg1, r14);  // argc.
   1360     __ movp(kCCallArg2, r15);  // argv.
   1361     __ Move(kCCallArg3, ExternalReference::isolate_address(isolate()));
   1362   }
   1363   __ call(rbx);
   1364 
   1365   if (result_size() > kMaxRegisterResultSize) {
   1366     // Read result values stored on stack. Result is stored
    // above the two Arguments object slots on Win64.
   1368     DCHECK_LE(result_size(), 3);
   1369     __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
   1370     __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
   1371     if (result_size() > 2) {
   1372       __ movq(kReturnRegister2, StackSpaceOperand(kArgExtraStackSpace + 2));
   1373     }
   1374   }
   1375   // Result is in rax, rdx:rax or r8:rdx:rax - do not destroy these registers!
   1376 
   1377   // Check result for exception sentinel.
   1378   Label exception_returned;
   1379   __ CompareRoot(rax, Heap::kExceptionRootIndex);
   1380   __ j(equal, &exception_returned);
   1381 
  // Check that there is no pending exception; otherwise we
   1383   // should have returned the exception sentinel.
   1384   if (FLAG_debug_code) {
   1385     Label okay;
   1386     __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
   1387     ExternalReference pending_exception_address(
   1388         Isolate::kPendingExceptionAddress, isolate());
   1389     Operand pending_exception_operand =
   1390         masm->ExternalOperand(pending_exception_address);
   1391     __ cmpp(r14, pending_exception_operand);
   1392     __ j(equal, &okay, Label::kNear);
   1393     __ int3();
   1394     __ bind(&okay);
   1395   }
   1396 
   1397   // Exit the JavaScript to C++ exit frame.
   1398   __ LeaveExitFrame(save_doubles(), !argv_in_register());
   1399   __ ret(0);
   1400 
   1401   // Handling of exception.
   1402   __ bind(&exception_returned);
   1403 
   1404   ExternalReference pending_handler_context_address(
   1405       Isolate::kPendingHandlerContextAddress, isolate());
   1406   ExternalReference pending_handler_code_address(
   1407       Isolate::kPendingHandlerCodeAddress, isolate());
   1408   ExternalReference pending_handler_offset_address(
   1409       Isolate::kPendingHandlerOffsetAddress, isolate());
   1410   ExternalReference pending_handler_fp_address(
   1411       Isolate::kPendingHandlerFPAddress, isolate());
   1412   ExternalReference pending_handler_sp_address(
   1413       Isolate::kPendingHandlerSPAddress, isolate());
   1414 
   1415   // Ask the runtime for help to determine the handler. This will set rax to
  // contain the current pending exception; don't clobber it.
   1417   ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
   1418                                  isolate());
   1419   {
   1420     FrameScope scope(masm, StackFrame::MANUAL);
   1421     __ movp(arg_reg_1, Immediate(0));  // argc.
   1422     __ movp(arg_reg_2, Immediate(0));  // argv.
   1423     __ Move(arg_reg_3, ExternalReference::isolate_address(isolate()));
   1424     __ PrepareCallCFunction(3);
   1425     __ CallCFunction(find_handler, 3);
   1426   }
   1427 
   1428   // Retrieve the handler context, SP and FP.
   1429   __ movp(rsi, masm->ExternalOperand(pending_handler_context_address));
   1430   __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address));
   1431   __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address));
   1432 
   1433   // If the handler is a JS frame, restore the context to the frame. Note that
  // the context (rsi) will be zero for non-JS frames.
   1435   Label skip;
   1436   __ testp(rsi, rsi);
   1437   __ j(zero, &skip, Label::kNear);
   1438   __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
   1439   __ bind(&skip);
   1440 
   1441   // Compute the handler entry address and jump to it.
   1442   __ movp(rdi, masm->ExternalOperand(pending_handler_code_address));
   1443   __ movp(rdx, masm->ExternalOperand(pending_handler_offset_address));
   1444   __ leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
   1445   __ jmp(rdi);
   1446 }
   1447 
   1448 
   1449 void JSEntryStub::Generate(MacroAssembler* masm) {
   1450   Label invoke, handler_entry, exit;
   1451   Label not_outermost_js, not_outermost_js_2;
   1452 
   1453   ProfileEntryHookStub::MaybeCallEntryHook(masm);
   1454 
   1455   {  // NOLINT. Scope block confuses linter.
   1456     MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
   1457     // Set up frame.
   1458     __ pushq(rbp);
   1459     __ movp(rbp, rsp);
   1460 
   1461     // Push the stack frame type.
   1462     __ Push(Immediate(StackFrame::TypeToMarker(type())));  // context slot
   1463     ExternalReference context_address(Isolate::kContextAddress, isolate());
   1464     __ Load(kScratchRegister, context_address);
   1465     __ Push(kScratchRegister);  // context
   1466     // Save callee-saved registers (X64/X32/Win64 calling conventions).
   1467     __ pushq(r12);
   1468     __ pushq(r13);
   1469     __ pushq(r14);
   1470     __ pushq(r15);
   1471 #ifdef _WIN64
    __ pushq(rdi);  // Only callee-saved in Win64 ABI, an argument in AMD64 ABI.
    __ pushq(rsi);  // Only callee-saved in Win64 ABI, an argument in AMD64 ABI.
   1474 #endif
   1475     __ pushq(rbx);
   1476 
   1477 #ifdef _WIN64
   1478     // On Win64 XMM6-XMM15 are callee-save
   1479     __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
   1480     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
   1481     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
   1482     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
   1483     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
   1484     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
   1485     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
   1486     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
   1487     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
   1488     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
   1489     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
   1490 #endif
   1491 
   1492     // Set up the roots and smi constant registers.
   1493     // Needs to be done before any further smi loads.
   1494     __ InitializeRootRegister();
   1495   }
   1496 
   1497   // Save copies of the top frame descriptor on the stack.
   1498   ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
   1499   {
   1500     Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
   1501     __ Push(c_entry_fp_operand);
   1502   }
   1503 
   1504   // If this is the outermost JS call, set js_entry_sp value.
   1505   ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
   1506   __ Load(rax, js_entry_sp);
   1507   __ testp(rax, rax);
   1508   __ j(not_zero, &not_outermost_js);
   1509   __ Push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
   1510   __ movp(rax, rbp);
   1511   __ Store(js_entry_sp, rax);
   1512   Label cont;
   1513   __ jmp(&cont);
   1514   __ bind(&not_outermost_js);
   1515   __ Push(Immediate(StackFrame::INNER_JSENTRY_FRAME));
   1516   __ bind(&cont);
   1517 
   1518   // Jump to a faked try block that does the invoke, with a faked catch
   1519   // block that sets the pending exception.
   1520   __ jmp(&invoke);
   1521   __ bind(&handler_entry);
   1522   handler_offset_ = handler_entry.pos();
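  // Record the code offset of the faked catch block so that the exception
  // handling machinery can transfer control here when an exception escapes
  // the invoked JS code.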
   1523   // Caught exception: Store result (exception) in the pending exception
   1524   // field in the JSEnv and return a failure sentinel.
   1525   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
   1526                                       isolate());
   1527   __ Store(pending_exception, rax);
   1528   __ LoadRoot(rax, Heap::kExceptionRootIndex);
   1529   __ jmp(&exit);
   1530 
   1531   // Invoke: Link this frame into the handler chain.
   1532   __ bind(&invoke);
   1533   __ PushStackHandler();
   1534 
   1535   // Fake a receiver (NULL).
   1536   __ Push(Immediate(0));  // receiver
   1537 
   1538   // Invoke the function by calling through JS entry trampoline builtin and
   1539   // pop the faked function when we return. We load the address from an
   1540   // external reference instead of inlining the call target address directly
   1541   // in the code, because the builtin stubs may not have been generated yet
   1542   // at the time this code is generated.
   1543   if (type() == StackFrame::ENTRY_CONSTRUCT) {
   1544     ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
   1545                                       isolate());
   1546     __ Load(rax, construct_entry);
   1547   } else {
   1548     ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
   1549     __ Load(rax, entry);
   1550   }
   1551   __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
   1552   __ call(kScratchRegister);
   1553 
   1554   // Unlink this frame from the handler chain.
   1555   __ PopStackHandler();
   1556 
   1557   __ bind(&exit);
   1558   // Check if the current stack frame is marked as the outermost JS frame.
   1559   __ Pop(rbx);
   1560   __ cmpp(rbx, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
   1561   __ j(not_equal, &not_outermost_js_2);
   1562   __ Move(kScratchRegister, js_entry_sp);
   1563   __ movp(Operand(kScratchRegister, 0), Immediate(0));
   1564   __ bind(&not_outermost_js_2);
   1565 
   1566   // Restore the top frame descriptor from the stack.
   1567   { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
   1568     __ Pop(c_entry_fp_operand);
   1569   }
   1570 
   1571   // Restore callee-saved registers (X64 conventions).
   1572 #ifdef _WIN64
   1573   // On Win64 XMM6-XMM15 are callee-save
   1574   __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
   1575   __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
   1576   __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
   1577   __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
   1578   __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
   1579   __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
   1580   __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
   1581   __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
   1582   __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
   1583   __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
   1584   __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
   1585 #endif
   1586 
   1587   __ popq(rbx);
   1588 #ifdef _WIN64
  // Callee-saved in the Win64 ABI, arguments/volatile in the AMD64 ABI.
   1590   __ popq(rsi);
   1591   __ popq(rdi);
   1592 #endif
   1593   __ popq(r15);
   1594   __ popq(r14);
   1595   __ popq(r13);
   1596   __ popq(r12);
   1597   __ addp(rsp, Immediate(2 * kPointerSize));  // remove markers
   1598 
   1599   // Restore frame pointer and return.
   1600   __ popq(rbp);
   1601   __ ret(0);
   1602 }
   1603 
   1604 
   1605 // -------------------------------------------------------------------------
   1606 // StringCharCodeAtGenerator
   1607 
   1608 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  // If the receiver is a smi, trigger the non-string case.
   1610   if (check_mode_ == RECEIVER_IS_UNKNOWN) {
   1611     __ JumpIfSmi(object_, receiver_not_string_);
   1612 
   1613     // Fetch the instance type of the receiver into result register.
   1614     __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
   1615     __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
    // If the receiver is not a string, trigger the non-string case.
   1617     __ testb(result_, Immediate(kIsNotStringMask));
   1618     __ j(not_zero, receiver_not_string_);
   1619   }
   1620 
  // If the index is not a smi, trigger the non-smi case.
   1622   __ JumpIfNotSmi(index_, &index_not_smi_);
   1623   __ bind(&got_smi_index_);
   1624 
   1625   // Check for index out of range.
   1626   __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
   1627   __ j(above_equal, index_out_of_range_);
   1628 
   1629   __ SmiToInteger32(index_, index_);
   1630 
   1631   StringCharLoadGenerator::Generate(
   1632       masm, object_, index_, result_, &call_runtime_);
   1633 
   1634   __ Integer32ToSmi(result_, result_);
   1635   __ bind(&exit_);
   1636 }
   1637 
   1638 
   1639 void StringCharCodeAtGenerator::GenerateSlow(
   1640     MacroAssembler* masm, EmbedMode embed_mode,
   1641     const RuntimeCallHelper& call_helper) {
   1642   __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
   1643 
   1644   Factory* factory = masm->isolate()->factory();
   1645   // Index is not a smi.
   1646   __ bind(&index_not_smi_);
   1647   // If index is a heap number, try converting it to an integer.
   1648   __ CheckMap(index_,
   1649               factory->heap_number_map(),
   1650               index_not_number_,
   1651               DONT_DO_SMI_CHECK);
   1652   call_helper.BeforeCall(masm);
   1653   if (embed_mode == PART_OF_IC_HANDLER) {
   1654     __ Push(LoadWithVectorDescriptor::VectorRegister());
   1655     __ Push(LoadDescriptor::SlotRegister());
   1656   }
   1657   __ Push(object_);
   1658   __ Push(index_);  // Consumed by runtime conversion function.
   1659   __ CallRuntime(Runtime::kNumberToSmi);
   1660   if (!index_.is(rax)) {
   1661     // Save the conversion result before the pop instructions below
   1662     // have a chance to overwrite it.
   1663     __ movp(index_, rax);
   1664   }
   1665   __ Pop(object_);
   1666   if (embed_mode == PART_OF_IC_HANDLER) {
   1667     __ Pop(LoadDescriptor::SlotRegister());
   1668     __ Pop(LoadWithVectorDescriptor::VectorRegister());
   1669   }
   1670   // Reload the instance type.
   1671   __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
   1672   __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
   1673   call_helper.AfterCall(masm);
   1674   // If index is still not a smi, it must be out of range.
   1675   __ JumpIfNotSmi(index_, index_out_of_range_);
   1676   // Otherwise, return to the fast path.
   1677   __ jmp(&got_smi_index_);
   1678 
   1679   // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code for getting the actual character
   1681   // is too complex (e.g., when the string needs to be flattened).
   1682   __ bind(&call_runtime_);
   1683   call_helper.BeforeCall(masm);
   1684   __ Push(object_);
   1685   __ Integer32ToSmi(index_, index_);
   1686   __ Push(index_);
   1687   __ CallRuntime(Runtime::kStringCharCodeAtRT);
   1688   if (!result_.is(rax)) {
   1689     __ movp(result_, rax);
   1690   }
   1691   call_helper.AfterCall(masm);
   1692   __ jmp(&exit_);
   1693 
   1694   __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
   1695 }
   1696 
   1697 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
   1698                                                    Register left,
   1699                                                    Register right,
   1700                                                    Register scratch1,
   1701                                                    Register scratch2) {
   1702   Register length = scratch1;
   1703 
   1704   // Compare lengths.
   1705   Label check_zero_length;
   1706   __ movp(length, FieldOperand(left, String::kLengthOffset));
   1707   __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
   1708   __ j(equal, &check_zero_length, Label::kNear);
   1709   __ Move(rax, Smi::FromInt(NOT_EQUAL));
   1710   __ ret(0);
   1711 
   1712   // Check if the length is zero.
   1713   Label compare_chars;
   1714   __ bind(&check_zero_length);
   1715   STATIC_ASSERT(kSmiTag == 0);
   1716   __ SmiTest(length);
   1717   __ j(not_zero, &compare_chars, Label::kNear);
   1718   __ Move(rax, Smi::FromInt(EQUAL));
   1719   __ ret(0);
   1720 
   1721   // Compare characters.
   1722   __ bind(&compare_chars);
   1723   Label strings_not_equal;
   1724   GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
   1725                                   &strings_not_equal, Label::kNear);
   1726 
   1727   // Characters are equal.
   1728   __ Move(rax, Smi::FromInt(EQUAL));
   1729   __ ret(0);
   1730 
   1731   // Characters are not equal.
   1732   __ bind(&strings_not_equal);
   1733   __ Move(rax, Smi::FromInt(NOT_EQUAL));
   1734   __ ret(0);
   1735 }
   1736 
   1737 
   1738 void StringHelper::GenerateCompareFlatOneByteStrings(
   1739     MacroAssembler* masm, Register left, Register right, Register scratch1,
   1740     Register scratch2, Register scratch3, Register scratch4) {
   1741   // Ensure that you can always subtract a string length from a non-negative
   1742   // number (e.g. another length).
   1743   STATIC_ASSERT(String::kMaxLength < 0x7fffffff);
   1744 
   1745   // Find minimum length and length difference.
   1746   __ movp(scratch1, FieldOperand(left, String::kLengthOffset));
   1747   __ movp(scratch4, scratch1);
   1748   __ SmiSub(scratch4,
   1749             scratch4,
   1750             FieldOperand(right, String::kLengthOffset));
   1751   // Register scratch4 now holds left.length - right.length.
   1752   const Register length_difference = scratch4;
   1753   Label left_shorter;
   1754   __ j(less, &left_shorter, Label::kNear);
  // The right string isn't longer than the left one.
   1756   // Get the right string's length by subtracting the (non-negative) difference
   1757   // from the left string's length.
   1758   __ SmiSub(scratch1, scratch1, length_difference);
   1759   __ bind(&left_shorter);
   1760   // Register scratch1 now holds Min(left.length, right.length).
   1761   const Register min_length = scratch1;
   1762 
   1763   Label compare_lengths;
   1764   // If min-length is zero, go directly to comparing lengths.
   1765   __ SmiTest(min_length);
   1766   __ j(zero, &compare_lengths, Label::kNear);
   1767 
   1768   // Compare loop.
   1769   Label result_not_equal;
   1770   GenerateOneByteCharsCompareLoop(
   1771       masm, left, right, min_length, scratch2, &result_not_equal,
   1772       // In debug-code mode, SmiTest below might push
   1773       // the target label outside the near range.
   1774       Label::kFar);
   1775 
   1776   // Completed loop without finding different characters.
   1777   // Compare lengths (precomputed).
   1778   __ bind(&compare_lengths);
   1779   __ SmiTest(length_difference);
   1780   Label length_not_equal;
   1781   __ j(not_zero, &length_not_equal, Label::kNear);
   1782 
   1783   // Result is EQUAL.
   1784   __ Move(rax, Smi::FromInt(EQUAL));
   1785   __ ret(0);
   1786 
   1787   Label result_greater;
   1788   Label result_less;
   1789   __ bind(&length_not_equal);
   1790   __ j(greater, &result_greater, Label::kNear);
   1791   __ jmp(&result_less, Label::kNear);
   1792   __ bind(&result_not_equal);
  // Left and right compared unequal, either by a character or by length.
   1794   __ j(above, &result_greater, Label::kNear);
   1795   __ bind(&result_less);
   1796 
   1797   // Result is LESS.
   1798   __ Move(rax, Smi::FromInt(LESS));
   1799   __ ret(0);
   1800 
   1801   // Result is GREATER.
   1802   __ bind(&result_greater);
   1803   __ Move(rax, Smi::FromInt(GREATER));
   1804   __ ret(0);
   1805 }
   1806 
   1807 
   1808 void StringHelper::GenerateOneByteCharsCompareLoop(
   1809     MacroAssembler* masm, Register left, Register right, Register length,
   1810     Register scratch, Label* chars_not_equal, Label::Distance near_jump) {
   1811   // Change index to run from -length to -1 by adding length to string
  // start. This means that the loop ends when the index reaches zero, which
   1813   // doesn't need an additional compare.
   1814   __ SmiToInteger32(length, length);
   1815   __ leap(left,
   1816          FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
   1817   __ leap(right,
   1818          FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
   1819   __ negq(length);
   1820   Register index = length;  // index = -length;
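  // E.g. for length == 3 the loop below reads offsets -3, -2 and -1 relative
  // to the one-past-the-end pointers computed above, and exits once the index
  // increments to zero.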
   1821 
   1822   // Compare loop.
   1823   Label loop;
   1824   __ bind(&loop);
   1825   __ movb(scratch, Operand(left, index, times_1, 0));
   1826   __ cmpb(scratch, Operand(right, index, times_1, 0));
   1827   __ j(not_equal, chars_not_equal, near_jump);
   1828   __ incq(index);
   1829   __ j(not_zero, &loop);
   1830 }
   1831 
   1832 
   1833 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
   1834   // ----------- S t a t e -------------
   1835   //  -- rdx    : left
   1836   //  -- rax    : right
   1837   //  -- rsp[0] : return address
   1838   // -----------------------------------
   1839 
   1840   // Load rcx with the allocation site.  We stick an undefined dummy value here
   1841   // and replace it with the real allocation site later when we instantiate this
   1842   // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
   1843   __ Move(rcx, isolate()->factory()->undefined_value());
   1844 
   1845   // Make sure that we actually patched the allocation site.
   1846   if (FLAG_debug_code) {
   1847     __ testb(rcx, Immediate(kSmiTagMask));
   1848     __ Assert(not_equal, kExpectedAllocationSite);
   1849     __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
   1850            isolate()->factory()->allocation_site_map());
   1851     __ Assert(equal, kExpectedAllocationSite);
   1852   }
   1853 
   1854   // Tail call into the stub that handles binary operations with allocation
   1855   // sites.
   1856   BinaryOpWithAllocationSiteStub stub(isolate(), state());
   1857   __ TailCallStub(&stub);
   1858 }
   1859 
   1860 
   1861 void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
   1862   DCHECK_EQ(CompareICState::BOOLEAN, state());
   1863   Label miss;
   1864   Label::Distance const miss_distance =
   1865       masm->emit_debug_code() ? Label::kFar : Label::kNear;
   1866 
   1867   __ JumpIfSmi(rdx, &miss, miss_distance);
   1868   __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
   1869   __ JumpIfSmi(rax, &miss, miss_distance);
   1870   __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
   1871   __ JumpIfNotRoot(rcx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
   1872   __ JumpIfNotRoot(rbx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
   1873   if (!Token::IsEqualityOp(op())) {
   1874     __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
   1875     __ AssertSmi(rax);
   1876     __ movp(rdx, FieldOperand(rdx, Oddball::kToNumberOffset));
   1877     __ AssertSmi(rdx);
   1878     __ pushq(rax);
   1879     __ movq(rax, rdx);
   1880     __ popq(rdx);
   1881   }
   1882   __ subp(rax, rdx);
   1883   __ Ret();
   1884 
   1885   __ bind(&miss);
   1886   GenerateMiss(masm);
   1887 }
   1888 
   1889 
   1890 void CompareICStub::GenerateSmis(MacroAssembler* masm) {
   1891   DCHECK(state() == CompareICState::SMI);
   1892   Label miss;
   1893   __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);
   1894 
   1895   if (GetCondition() == equal) {
   1896     // For equality we do not care about the sign of the result.
   1897     __ subp(rax, rdx);
   1898   } else {
   1899     Label done;
   1900     __ subp(rdx, rax);
   1901     __ j(no_overflow, &done, Label::kNear);
   1902     // Correct sign of result in case of overflow.
   1903     __ notp(rdx);
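    // (Only the sign of the result matters here; NOT flips the sign bit and
    // yields a correctly-signed value even though its magnitude is off by
    // one.)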
   1904     __ bind(&done);
   1905     __ movp(rax, rdx);
   1906   }
   1907   __ ret(0);
   1908 
   1909   __ bind(&miss);
   1910   GenerateMiss(masm);
   1911 }
   1912 
   1913 
   1914 void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
   1915   DCHECK(state() == CompareICState::NUMBER);
   1916 
   1917   Label generic_stub;
   1918   Label unordered, maybe_undefined1, maybe_undefined2;
   1919   Label miss;
   1920 
   1921   if (left() == CompareICState::SMI) {
   1922     __ JumpIfNotSmi(rdx, &miss);
   1923   }
   1924   if (right() == CompareICState::SMI) {
   1925     __ JumpIfNotSmi(rax, &miss);
   1926   }
   1927 
  // Load the left and right operands.
   1929   Label done, left, left_smi, right_smi;
   1930   __ JumpIfSmi(rax, &right_smi, Label::kNear);
   1931   __ CompareMap(rax, isolate()->factory()->heap_number_map());
   1932   __ j(not_equal, &maybe_undefined1, Label::kNear);
   1933   __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
   1934   __ jmp(&left, Label::kNear);
   1935   __ bind(&right_smi);
   1936   __ SmiToInteger32(rcx, rax);  // Can't clobber rax yet.
   1937   __ Cvtlsi2sd(xmm1, rcx);
   1938 
   1939   __ bind(&left);
   1940   __ JumpIfSmi(rdx, &left_smi, Label::kNear);
   1941   __ CompareMap(rdx, isolate()->factory()->heap_number_map());
   1942   __ j(not_equal, &maybe_undefined2, Label::kNear);
   1943   __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
   1944   __ jmp(&done);
   1945   __ bind(&left_smi);
   1946   __ SmiToInteger32(rcx, rdx);  // Can't clobber rdx yet.
   1947   __ Cvtlsi2sd(xmm0, rcx);
   1948 
   1949   __ bind(&done);
   1950   // Compare operands
   1951   __ Ucomisd(xmm0, xmm1);
   1952 
   1953   // Don't base result on EFLAGS when a NaN is involved.
   1954   __ j(parity_even, &unordered, Label::kNear);
   1955 
   1956   // Return a result of -1, 0, or 1, based on EFLAGS.
  // Use mov rather than xor, because xor would destroy the flag register.
   1958   __ movl(rax, Immediate(0));
   1959   __ movl(rcx, Immediate(0));
   1960   __ setcc(above, rax);  // Add one to zero if carry clear and not equal.
   1961   __ sbbp(rax, rcx);  // Subtract one if below (aka. carry set).
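  // Net effect: rax == 1 if left > right (setcc above), rax == -1 if
  // left < right (sbb subtracts the set carry flag), and rax == 0 if equal.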
   1962   __ ret(0);
   1963 
   1964   __ bind(&unordered);
   1965   __ bind(&generic_stub);
   1966   CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
   1967                      CompareICState::GENERIC, CompareICState::GENERIC);
   1968   __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
   1969 
   1970   __ bind(&maybe_undefined1);
   1971   if (Token::IsOrderedRelationalCompareOp(op())) {
   1972     __ Cmp(rax, isolate()->factory()->undefined_value());
   1973     __ j(not_equal, &miss);
   1974     __ JumpIfSmi(rdx, &unordered);
   1975     __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
   1976     __ j(not_equal, &maybe_undefined2, Label::kNear);
   1977     __ jmp(&unordered);
   1978   }
   1979 
   1980   __ bind(&maybe_undefined2);
   1981   if (Token::IsOrderedRelationalCompareOp(op())) {
   1982     __ Cmp(rdx, isolate()->factory()->undefined_value());
   1983     __ j(equal, &unordered);
   1984   }
   1985 
   1986   __ bind(&miss);
   1987   GenerateMiss(masm);
   1988 }
   1989 
   1990 
   1991 void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
   1992   DCHECK(state() == CompareICState::INTERNALIZED_STRING);
   1993   DCHECK(GetCondition() == equal);
   1994 
   1995   // Registers containing left and right operands respectively.
   1996   Register left = rdx;
   1997   Register right = rax;
   1998   Register tmp1 = rcx;
   1999   Register tmp2 = rbx;
   2000 
   2001   // Check that both operands are heap objects.
   2002   Label miss;
   2003   Condition cond = masm->CheckEitherSmi(left, right, tmp1);
   2004   __ j(cond, &miss, Label::kNear);
   2005 
   2006   // Check that both operands are internalized strings.
   2007   __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
   2008   __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
   2009   __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   2010   __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   2011   STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
   2012   __ orp(tmp1, tmp2);
   2013   __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
   2014   __ j(not_zero, &miss, Label::kNear);
   2015 
   2016   // Internalized strings are compared by identity.
   2017   Label done;
   2018   __ cmpp(left, right);
   2019   // Make sure rax is non-zero. At this point input operands are
   2020   // guaranteed to be non-zero.
   2021   DCHECK(right.is(rax));
   2022   __ j(not_equal, &done, Label::kNear);
   2023   STATIC_ASSERT(EQUAL == 0);
   2024   STATIC_ASSERT(kSmiTag == 0);
   2025   __ Move(rax, Smi::FromInt(EQUAL));
   2026   __ bind(&done);
   2027   __ ret(0);
   2028 
   2029   __ bind(&miss);
   2030   GenerateMiss(masm);
   2031 }
   2032 
   2033 
   2034 void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
   2035   DCHECK(state() == CompareICState::UNIQUE_NAME);
   2036   DCHECK(GetCondition() == equal);
   2037 
   2038   // Registers containing left and right operands respectively.
   2039   Register left = rdx;
   2040   Register right = rax;
   2041   Register tmp1 = rcx;
   2042   Register tmp2 = rbx;
   2043 
   2044   // Check that both operands are heap objects.
   2045   Label miss;
   2046   Condition cond = masm->CheckEitherSmi(left, right, tmp1);
   2047   __ j(cond, &miss, Label::kNear);
   2048 
   2049   // Check that both operands are unique names. This leaves the instance
   2050   // types loaded in tmp1 and tmp2.
   2051   __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
   2052   __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
   2053   __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   2054   __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   2055 
   2056   __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
   2057   __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
   2058 
   2059   // Unique names are compared by identity.
   2060   Label done;
   2061   __ cmpp(left, right);
   2062   // Make sure rax is non-zero. At this point input operands are
   2063   // guaranteed to be non-zero.
   2064   DCHECK(right.is(rax));
   2065   __ j(not_equal, &done, Label::kNear);
   2066   STATIC_ASSERT(EQUAL == 0);
   2067   STATIC_ASSERT(kSmiTag == 0);
   2068   __ Move(rax, Smi::FromInt(EQUAL));
   2069   __ bind(&done);
   2070   __ ret(0);
   2071 
   2072   __ bind(&miss);
   2073   GenerateMiss(masm);
   2074 }
   2075 
   2076 
   2077 void CompareICStub::GenerateStrings(MacroAssembler* masm) {
   2078   DCHECK(state() == CompareICState::STRING);
   2079   Label miss;
   2080 
   2081   bool equality = Token::IsEqualityOp(op());
   2082 
   2083   // Registers containing left and right operands respectively.
   2084   Register left = rdx;
   2085   Register right = rax;
   2086   Register tmp1 = rcx;
   2087   Register tmp2 = rbx;
   2088   Register tmp3 = rdi;
   2089 
   2090   // Check that both operands are heap objects.
   2091   Condition cond = masm->CheckEitherSmi(left, right, tmp1);
   2092   __ j(cond, &miss);
   2093 
   2094   // Check that both operands are strings. This leaves the instance
   2095   // types loaded in tmp1 and tmp2.
   2096   __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
   2097   __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
   2098   __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   2099   __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   2100   __ movp(tmp3, tmp1);
   2101   STATIC_ASSERT(kNotStringTag != 0);
   2102   __ orp(tmp3, tmp2);
   2103   __ testb(tmp3, Immediate(kIsNotStringMask));
   2104   __ j(not_zero, &miss);
   2105 
   2106   // Fast check for identical strings.
   2107   Label not_same;
   2108   __ cmpp(left, right);
   2109   __ j(not_equal, &not_same, Label::kNear);
   2110   STATIC_ASSERT(EQUAL == 0);
   2111   STATIC_ASSERT(kSmiTag == 0);
   2112   __ Move(rax, Smi::FromInt(EQUAL));
   2113   __ ret(0);
   2114 
   2115   // Handle not identical strings.
   2116   __ bind(&not_same);
   2117 
   2118   // Check that both strings are internalized strings. If they are, we're done
   2119   // because we already know they are not identical. We also know they are both
   2120   // strings.
   2121   if (equality) {
   2122     Label do_compare;
   2123     STATIC_ASSERT(kInternalizedTag == 0);
   2124     __ orp(tmp1, tmp2);
   2125     __ testb(tmp1, Immediate(kIsNotInternalizedMask));
   2126     __ j(not_zero, &do_compare, Label::kNear);
   2127     // Make sure rax is non-zero. At this point input operands are
   2128     // guaranteed to be non-zero.
   2129     DCHECK(right.is(rax));
   2130     __ ret(0);
   2131     __ bind(&do_compare);
   2132   }
   2133 
   2134   // Check that both strings are sequential one-byte.
   2135   Label runtime;
   2136   __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);
   2137 
   2138   // Compare flat one-byte strings. Returns when done.
   2139   if (equality) {
   2140     StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
   2141                                                   tmp2);
   2142   } else {
   2143     StringHelper::GenerateCompareFlatOneByteStrings(
   2144         masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
   2145   }
   2146 
   2147   // Handle more complex cases in runtime.
   2148   __ bind(&runtime);
   2149   if (equality) {
   2150     {
   2151       FrameScope scope(masm, StackFrame::INTERNAL);
   2152       __ Push(left);
   2153       __ Push(right);
   2154       __ CallRuntime(Runtime::kStringEqual);
   2155     }
   2156     __ LoadRoot(rdx, Heap::kTrueValueRootIndex);
   2157     __ subp(rax, rdx);
   2158     __ Ret();
   2159   } else {
   2160     __ PopReturnAddressTo(tmp1);
   2161     __ Push(left);
   2162     __ Push(right);
   2163     __ PushReturnAddressFrom(tmp1);
   2164     __ TailCallRuntime(Runtime::kStringCompare);
   2165   }
   2166 
   2167   __ bind(&miss);
   2168   GenerateMiss(masm);
   2169 }
   2170 
   2171 
   2172 void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
   2173   DCHECK_EQ(CompareICState::RECEIVER, state());
   2174   Label miss;
   2175   Condition either_smi = masm->CheckEitherSmi(rdx, rax);
   2176   __ j(either_smi, &miss, Label::kNear);
   2177 
   2178   STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
   2179   __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
   2180   __ j(below, &miss, Label::kNear);
   2181   __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
   2182   __ j(below, &miss, Label::kNear);
   2183 
   2184   DCHECK_EQ(equal, GetCondition());
   2185   __ subp(rax, rdx);
   2186   __ ret(0);
   2187 
   2188   __ bind(&miss);
   2189   GenerateMiss(masm);
   2190 }
   2191 
   2192 
   2193 void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
   2194   Label miss;
   2195   Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
   2196   Condition either_smi = masm->CheckEitherSmi(rdx, rax);
   2197   __ j(either_smi, &miss, Label::kNear);
   2198 
   2199   __ GetWeakValue(rdi, cell);
   2200   __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rdi);
   2201   __ j(not_equal, &miss, Label::kNear);
   2202   __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rdi);
   2203   __ j(not_equal, &miss, Label::kNear);
   2204 
   2205   if (Token::IsEqualityOp(op())) {
   2206     __ subp(rax, rdx);
   2207     __ ret(0);
   2208   } else {
   2209     __ PopReturnAddressTo(rcx);
   2210     __ Push(rdx);
   2211     __ Push(rax);
   2212     __ Push(Smi::FromInt(NegativeComparisonResult(GetCondition())));
   2213     __ PushReturnAddressFrom(rcx);
   2214     __ TailCallRuntime(Runtime::kCompare);
   2215   }
   2216 
   2217   __ bind(&miss);
   2218   GenerateMiss(masm);
   2219 }
   2220 
   2221 
   2222 void CompareICStub::GenerateMiss(MacroAssembler* masm) {
   2223   {
   2224     // Call the runtime system in a fresh internal frame.
   2225     FrameScope scope(masm, StackFrame::INTERNAL);
   2226     __ Push(rdx);
   2227     __ Push(rax);
   2228     __ Push(rdx);
   2229     __ Push(rax);
   2230     __ Push(Smi::FromInt(op()));
   2231     __ CallRuntime(Runtime::kCompareIC_Miss);
   2232 
   2233     // Compute the entry point of the rewritten stub.
   2234     __ leap(rdi, FieldOperand(rax, Code::kHeaderSize));
   2235     __ Pop(rax);
   2236     __ Pop(rdx);
   2237   }
   2238 
   2239   // Do a tail call to the rewritten stub.
   2240   __ jmp(rdi);
   2241 }
   2242 
   2243 
   2244 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
   2245                                                       Label* miss,
   2246                                                       Label* done,
   2247                                                       Register properties,
   2248                                                       Handle<Name> name,
   2249                                                       Register r0) {
   2250   DCHECK(name->IsUniqueName());
  // If the names of the slots probed for the hash value (probes 1 to
  // kProbes - 1) are not equal to the name, and the kProbes-th slot is unused
  // (its name is the undefined value), then the hash table is guaranteed not
  // to contain the property. This holds even if some slots represent deleted
  // properties (their names are the hole value).
   2256   for (int i = 0; i < kInlinedProbes; i++) {
   2257     // r0 points to properties hash.
   2258     // Compute the masked index: (hash + i + i * i) & mask.
   2259     Register index = r0;
   2260     // Capacity is smi 2^n.
   2261     __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
   2262     __ decl(index);
   2263     __ andp(index,
   2264             Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));
   2265 
   2266     // Scale the index by multiplying by the entry size.
   2267     STATIC_ASSERT(NameDictionary::kEntrySize == 3);
   2268     __ leap(index, Operand(index, index, times_2, 0));  // index *= 3.
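    // Each dictionary entry occupies kEntrySize (== 3) pointer-sized slots,
    // so the probe index is scaled by 3 via index + index * 2.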
   2269 
   2270     Register entity_name = r0;
   2271     // Having undefined at this place means the name is not contained.
   2272     STATIC_ASSERT(kSmiTagSize == 1);
   2273     __ movp(entity_name, Operand(properties,
   2274                                  index,
   2275                                  times_pointer_size,
   2276                                  kElementsStartOffset - kHeapObjectTag));
   2277     __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
   2278     __ j(equal, done);
   2279 
    // Stop if the property was found.
   2281     __ Cmp(entity_name, Handle<Name>(name));
   2282     __ j(equal, miss);
   2283 
   2284     Label good;
   2285     // Check for the hole and skip.
   2286     __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
   2287     __ j(equal, &good, Label::kNear);
   2288 
   2289     // Check if the entry name is not a unique name.
   2290     __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
   2291     __ JumpIfNotUniqueNameInstanceType(
   2292         FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
   2293     __ bind(&good);
   2294   }
   2295 
   2296   NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
   2297                                 NEGATIVE_LOOKUP);
   2298   __ Push(Handle<Object>(name));
   2299   __ Push(Immediate(name->Hash()));
   2300   __ CallStub(&stub);
   2301   __ testp(r0, r0);
   2302   __ j(not_zero, miss);
   2303   __ jmp(done);
   2304 }
   2305 
   2306 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
   2307   // This stub overrides SometimesSetsUpAFrame() to return false.  That means
   2308   // we cannot call anything that could cause a GC from this stub.
   2309   // Stack frame on entry:
   2310   //  rsp[0 * kPointerSize] : return address.
   2311   //  rsp[1 * kPointerSize] : key's hash.
   2312   //  rsp[2 * kPointerSize] : key.
   2313   // Registers:
   2314   //  dictionary_: NameDictionary to probe.
   2315   //  result_: used as scratch.
   2316   //  index_: will hold an index of entry if lookup is successful.
   2317   //          might alias with result_.
   2318   // Returns:
  //  result_ is zero if lookup failed, non-zero otherwise.
   2320 
   2321   Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
   2322 
   2323   Register scratch = result();
   2324 
   2325   __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset));
   2326   __ decl(scratch);
   2327   __ Push(scratch);
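  // scratch now holds the capacity mask (capacity - 1); keep it on the stack
  // so the register can be reused in the probe loop below.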
   2328 
  // If the names of the slots probed for the hash value (probes 1 to
  // kProbes - 1) are not equal to the name, and the kProbes-th slot is unused
  // (its name is the undefined value), then the hash table is guaranteed not
  // to contain the property. This holds even if some slots represent deleted
  // properties (their names are the hole value).
   2334   StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
   2335                               kPointerSize);
   2336   for (int i = kInlinedProbes; i < kTotalProbes; i++) {
   2337     // Compute the masked index: (hash + i + i * i) & mask.
   2338     __ movp(scratch, args.GetArgumentOperand(1));
   2339     if (i > 0) {
   2340       __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
   2341     }
   2342     __ andp(scratch, Operand(rsp, 0));
   2343 
   2344     // Scale the index by multiplying by the entry size.
   2345     STATIC_ASSERT(NameDictionary::kEntrySize == 3);
   2346     __ leap(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.
   2347 
   2348     // Having undefined at this place means the name is not contained.
   2349     __ movp(scratch, Operand(dictionary(), index(), times_pointer_size,
   2350                              kElementsStartOffset - kHeapObjectTag));
   2351 
   2352     __ Cmp(scratch, isolate()->factory()->undefined_value());
   2353     __ j(equal, &not_in_dictionary);
   2354 
    // Stop if the property was found.
   2356     __ cmpp(scratch, args.GetArgumentOperand(0));
   2357     __ j(equal, &in_dictionary);
   2358 
   2359     if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
   2360       // If we hit a key that is not a unique name during negative
      // lookup, we have to bail out as this key might be equal to the
   2362       // key we are looking for.
   2363 
   2364       // Check if the entry name is not a unique name.
   2365       __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
   2366       __ JumpIfNotUniqueNameInstanceType(
   2367           FieldOperand(scratch, Map::kInstanceTypeOffset),
   2368           &maybe_in_dictionary);
   2369     }
   2370   }
   2371 
   2372   __ bind(&maybe_in_dictionary);
  // If we are doing a negative lookup, then probing failure should be
  // treated as a lookup success. For a positive lookup, probing failure
  // should be treated as a lookup failure.
   2376   if (mode() == POSITIVE_LOOKUP) {
   2377     __ movp(scratch, Immediate(0));
   2378     __ Drop(1);
   2379     __ ret(2 * kPointerSize);
   2380   }
   2381 
   2382   __ bind(&in_dictionary);
   2383   __ movp(scratch, Immediate(1));
   2384   __ Drop(1);
   2385   __ ret(2 * kPointerSize);
   2386 
   2387   __ bind(&not_in_dictionary);
   2388   __ movp(scratch, Immediate(0));
   2389   __ Drop(1);
   2390   __ ret(2 * kPointerSize);
   2391 }
   2392 
   2393 
   2394 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
   2395     Isolate* isolate) {
   2396   StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
   2397   stub1.GetCode();
   2398   StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
   2399   stub2.GetCode();
   2400 }
   2401 
   2402 
// Takes the input in 3 registers: address_, value_ and object_.  A pointer to
// the value has just been written into the object; now this stub makes sure
   2405 // we keep the GC informed.  The word in the object where the value has been
   2406 // written is in the address register.
   2407 void RecordWriteStub::Generate(MacroAssembler* masm) {
   2408   Label skip_to_incremental_noncompacting;
   2409   Label skip_to_incremental_compacting;
   2410 
   2411   // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call.  We patch them back
  // and forth between nop instructions (in these positions) and the real
  // branches when we start and stop incremental heap marking.
   2415   // See RecordWriteStub::Patch for details.
   2416   __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
   2417   __ jmp(&skip_to_incremental_compacting, Label::kFar);
   2418 
   2419   if (remembered_set_action() == EMIT_REMEMBERED_SET) {
   2420     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
   2421                            MacroAssembler::kReturnAtEnd);
   2422   } else {
   2423     __ ret(0);
   2424   }
   2425 
   2426   __ bind(&skip_to_incremental_noncompacting);
   2427   GenerateIncremental(masm, INCREMENTAL);
   2428 
   2429   __ bind(&skip_to_incremental_compacting);
   2430   GenerateIncremental(masm, INCREMENTAL_COMPACTION);
   2431 
   2432   // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
   2433   // Will be checked in IncrementalMarking::ActivateGeneratedStub.
   2434   masm->set_byte_at(0, kTwoByteNopInstruction);
   2435   masm->set_byte_at(2, kFiveByteNopInstruction);
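  // The near jmp (2 bytes) at offset 0 and the far jmp (5 bytes) at offset 2
  // are overwritten with nops here, so a freshly generated stub runs the
  // STORE_BUFFER_ONLY path until the jumps are patched back in.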
   2436 }
   2437 
   2438 
   2439 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
   2440   regs_.Save(masm);
   2441 
   2442   if (remembered_set_action() == EMIT_REMEMBERED_SET) {
   2443     Label dont_need_remembered_set;
   2444 
   2445     __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
   2446     __ JumpIfNotInNewSpace(regs_.scratch0(),
   2447                            regs_.scratch0(),
   2448                            &dont_need_remembered_set);
   2449 
   2450     __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
   2451                         &dont_need_remembered_set);
   2452 
   2453     // First notify the incremental marker if necessary, then update the
   2454     // remembered set.
   2455     CheckNeedsToInformIncrementalMarker(
   2456         masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
   2457     InformIncrementalMarker(masm);
   2458     regs_.Restore(masm);
   2459     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
   2460                            MacroAssembler::kReturnAtEnd);
   2461 
   2462     __ bind(&dont_need_remembered_set);
   2463   }
   2464 
   2465   CheckNeedsToInformIncrementalMarker(
   2466       masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
   2467   InformIncrementalMarker(masm);
   2468   regs_.Restore(masm);
   2469   __ ret(0);
   2470 }
   2471 
   2472 
   2473 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
   2474   regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
   2475   Register address =
   2476       arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
   2477   DCHECK(!address.is(regs_.object()));
   2478   DCHECK(!address.is(arg_reg_1));
   2479   __ Move(address, regs_.address());
   2480   __ Move(arg_reg_1, regs_.object());
   2481   // TODO(gc) Can we just set address arg2 in the beginning?
   2482   __ Move(arg_reg_2, address);
   2483   __ LoadAddress(arg_reg_3,
   2484                  ExternalReference::isolate_address(isolate()));
   2485   int argument_count = 3;
   2486 
   2487   AllowExternalCallThatCantCauseGC scope(masm);
   2488   __ PrepareCallCFunction(argument_count);
   2489   __ CallCFunction(
   2490       ExternalReference::incremental_marking_record_write_function(isolate()),
   2491       argument_count);
   2492   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
   2493 }
   2494 
   2495 void RecordWriteStub::Activate(Code* code) {
   2496   code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
   2497 }
   2498 
   2499 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
   2500     MacroAssembler* masm,
   2501     OnNoNeedToInformIncrementalMarker on_no_need,
   2502     Mode mode) {
   2503   Label on_black;
   2504   Label need_incremental;
   2505   Label need_incremental_pop_object;
   2506 
  // Let's look at the color of the object: if it is not black, we don't have
   2508   // to inform the incremental marker.
   2509   __ JumpIfBlack(regs_.object(),
   2510                  regs_.scratch0(),
   2511                  regs_.scratch1(),
   2512                  &on_black,
   2513                  Label::kNear);
   2514 
  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&on_black);

  // Get the value from the slot.
  __ movp(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     zero,
                     &need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ Push(regs_.object());
  __ JumpIfWhite(regs_.scratch0(),  // The value.
                 regs_.scratch1(),  // Scratch.
                 regs_.object(),    // Scratch.
                 &need_incremental_pop_object, Label::kNear);
  __ Pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ Pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}


void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
  __ movp(rbx, MemOperand(rbp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ PopReturnAddressTo(rcx);
  int additional_offset =
      function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
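  // rbx holds the dynamic argument count, so this single lea drops all of
  // the caller's arguments and, in JS_FUNCTION_STUB_MODE, one extra slot
  // for the receiver as well.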
  __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
  __ jmp(rcx);  // Return to IC Miss stub, continuation still on stack.
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->CallStub(&stub);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // This stub can be called from essentially anywhere, so it needs to save
  // all volatile and callee-save registers.
  const size_t kNumSavedRegisters = 2;
  __ pushq(arg_reg_1);
  __ pushq(arg_reg_2);

  // Calculate the original stack pointer and store it in the second arg.
  __ leap(arg_reg_2,
          Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));

  // Calculate the function address and store it in the first arg.
  __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
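  // (The hook site reaches this stub via a short rel32 call, so the saved
  // return address minus kShortCallInstructionLength is the address of that
  // call instruction, which the entry hook treats as the function address.)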

  // Save the remainder of the volatile registers.
  masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);

  // Call the entry hook function.
  __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
          Assembler::RelocInfoNone());

  AllowExternalCallThatCantCauseGC scope(masm);

  const int kArgumentCount = 2;
  __ PrepareCallCFunction(kArgumentCount);
  __ CallCFunction(rax, kArgumentCount);

  // Restore volatile regs.
  masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
  __ popq(arg_reg_2);
  __ popq(arg_reg_1);

  __ Ret();
}


template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}
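
// In the DONT_OVERRIDE case above, the emitted code amounts to an unrolled
// switch over the elements kind in rdx -- roughly:
//
//   switch (kind) {
//     case FAST_SMI_ELEMENTS:       /* tail call T(FAST_SMI_ELEMENTS) */
//     case FAST_HOLEY_SMI_ELEMENTS: /* tail call T(FAST_HOLEY_SMI_ELEMENTS) */
//     ...                           /* one case per fast elements kind */
//     default:                      /* Abort() */
//   }
//
// Each comparison is against a compile-time constant, so no dispatch table
// is needed.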


static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // rax - number of arguments
  // rdi - constructor?
  // rsp[0] - return address
  // rsp[8] - last argument

  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
    STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
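    // Note the encoding pinned down by these asserts: each packed kind is
    // even and its holey variant is that value plus one, so bit 0 alone
    // distinguishes holey from packed, and "kind + 1" is the holey kind.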

    // Is the low bit set? If so, we are holey and that is good.
    __ testb(rdx, Immediate(1));
    __ j(not_zero, &normal_sequence);
  }

  // Look at the first argument.
  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, args.GetArgumentOperand(0));
  __ testp(rcx, rcx);
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(masm->isolate(),
                                            initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry (only if we have an allocation site in the slot).
    __ incl(rdx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
      __ Assert(equal, kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store rdx
    // in the AllocationSite::transition_info field, because the elements kind
    // is restricted to a portion of the field; the upper bits need to be left
    // alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset),
                      Smi::FromInt(kFastElementsKindPackedToHoley));
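    // (kFastElementsKindPackedToHoley is the packed->holey distance -- +1 in
    // the encoding asserted earlier -- and ElementsKindBits starts at shift
    // 0, so a plain Smi add of that constant bumps the kind in place without
    // touching the rest of transition_info.)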

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(isolate, kind);
    stub.GetCode();
    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
      stub1.GetCode();
    }
  }
}

void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayNArgumentsConstructorStub stub(isolate);
  stub.GetCode();

  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things
    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    stubh1.GetCode();
    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    stubh2.GetCode();
  }
}

void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm, AllocationSiteOverrideMode mode) {
  Label not_zero_case, not_one_case;
  __ testp(rax, rax);
  __ j(not_zero, &not_zero_case);
  CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);
  CreateArrayDispatchOneArgument(masm, mode);

  __ bind(&not_one_case);
  ArrayNArgumentsConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rbx    : AllocationSite or undefined
  //  -- rdi    : constructor
  //  -- rdx    : new target
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // With kSmiTag == 0, the Smi check below catches a NULL pointer as well
    // as a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in rbx or a valid AllocationSite
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  // Enter the context of the Array function.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  Label subclassing;
  __ cmpp(rdi, rdx);
  __ j(not_equal, &subclassing);

  Label no_info;
  // If the feedback vector is the undefined value, call an array constructor
  // that doesn't use AllocationSites.
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
  __ SmiToInteger32(rdx, rdx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  // Subclassing
  __ bind(&subclassing);
  StackArgumentsAccessor args(rsp, rax);
  __ movp(args.GetReceiverOperand(), rdi);
  __ addp(rax, Immediate(3));
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(rbx);
  __ PushReturnAddressFrom(rcx);
  __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}


void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ testp(rax, rax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(rcx, args.GetArgumentOperand(0));
    __ testp(rcx, rcx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  ArrayNArgumentsConstructorStub stubN(isolate());
  __ TailCallStub(&stubN);
}


void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // With kSmiTag == 0, the Smi check below catches a NULL pointer as well
    // as a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind
  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into rcx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(rcx);

  if (FLAG_debug_code) {
    Label done;
    __ cmpl(rcx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpl(rcx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}

static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  DCHECK(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}

// Prepares the stack to put arguments (aligns and so on).  The WIN64 calling
// convention requires putting a pointer to the return value slot into
// rcx (rcx must be preserved until CallApiFunctionAndReturn).  Saves
// context (rsi).  Clobbers rax.  Allocates arg_stack_space * kPointerSize
// inside the exit frame (not GCed), accessible via StackSpaceOperand.
static void PrepareCallApiFunction(MacroAssembler* masm, int arg_stack_space) {
  __ EnterApiExitFrame(arg_stack_space);
}


// Calls an API function.  Allocates HandleScope, extracts returned value
// from handle and propagates exceptions.  Clobbers r14, r15, rbx and
// caller-save registers.  Restores context.  On return removes
// stack_space * kPointerSize (GCed).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     Register thunk_last_arg, int stack_space,
                                     Operand* stack_space_operand,
                                     Operand return_value_operand,
                                     Operand* context_restore_operand) {
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  Isolate* isolate = masm->isolate();
  Factory* factory = isolate->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(isolate), next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(isolate), next_address);
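  // (next, limit and level describe the isolate's current HandleScope; the
  // Offset() values express the limit and level fields as displacements
  // from the "next" field, so all three can be addressed off one base
  // register below.)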
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);

  DCHECK(rdx.is(function_address) || r8.is(function_address));
  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  __ Move(base_reg, next_address);
  __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
  __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  __ addl(Operand(base_reg, kLevelOffset), Immediate(1));

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label profiler_disabled;
  Label end_profiler_check;
  __ Move(rax, ExternalReference::is_profiling_address(isolate));
  __ cmpb(Operand(rax, 0), Immediate(0));
  __ j(zero, &profiler_disabled);
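  // (When the profiler is active, the call is routed through a thunk so
  // entry and exit can be logged; the thunk receives the real callback's
  // address as its last argument, which is what thunk_last_arg is for.)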

  // Third parameter is the address of the actual callback function.
  __ Move(thunk_last_arg, function_address);
  __ Move(rax, thunk_ref);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  // Call the api function directly.
  __ Move(rax, function_address);

  __ bind(&end_profiler_check);

  // Call the api function!
  __ call(rax);

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  // Load the value from ReturnValue
  __ movp(rax, return_value_operand);
  __ bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
  __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
  __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    __ movp(rsi, *context_restore_operand);
  }
  if (stack_space_operand != nullptr) {
    __ movp(rbx, *stack_space_operand);
  }
  __ LeaveApiExitFrame(!restore_context);

  // Check if the function scheduled an exception.
  __ Move(rdi, scheduled_exception_address);
  __ Cmp(Operand(rdi, 0), factory->the_hole_value());
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = rax;
  Register map = rcx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ movp(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kTrueValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kFalseValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kNullValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ Abort(kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

  if (stack_space_operand != nullptr) {
    DCHECK_EQ(stack_space, 0);
    __ PopReturnAddressTo(rcx);
    __ addq(rsp, rbx);
    __ jmp(rcx);
  } else {
    __ ret(stack_space * kPointerSize);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  __ bind(&delete_allocated_handles);
  __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
  __ movp(prev_limit_reg, rax);
  __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
  __ LoadAddress(rax,
                 ExternalReference::delete_handle_scope_extensions(isolate));
  __ call(rax);
  __ movp(rax, prev_limit_reg);
  __ jmp(&leave_exit_frame);
}

void CallApiCallbackStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi                 : callee
  //  -- rbx                 : call_data
  //  -- rcx                 : holder
  //  -- rdx                 : api_function_address
  //  -- rsi                 : context
  //  -- rax                 : number of arguments if argc is a register
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[argc * 8]       : first argument
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  Register callee = rdi;
  Register call_data = rbx;
  Register holder = rcx;
  Register api_function_address = rdx;
  Register context = rsi;
  Register return_address = r8;

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kNewTargetIndex == 7);
  STATIC_ASSERT(FCA::kArgsLength == 8);
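
  // After the eight pushes below (return address popped first), the stack
  // holds the FunctionCallbackArguments array, matching the indices
  // asserted above:
  //
  //   rsp[0]  : holder                 (FCA::kHolderIndex == 0)
  //   rsp[8]  : isolate
  //   rsp[16] : return value default
  //   rsp[24] : return value
  //   rsp[32] : call data
  //   rsp[40] : callee
  //   rsp[48] : context save
  //   rsp[56] : new target             (FCA::kNewTargetIndex == 7)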

  __ PopReturnAddressTo(return_address);

  // new target
  __ PushRoot(Heap::kUndefinedValueRootIndex);

  // context save
  __ Push(context);

  // callee
  __ Push(callee);

  // call data
  __ Push(call_data);
  Register scratch = call_data;
  if (!this->call_data_undefined()) {
    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  }
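  // (scratch aliases call_data: if call_data was already undefined it can be
  // reused as-is; otherwise undefined is loaded here for the two
  // return-value slots pushed next.)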
  // return value
  __ Push(scratch);
  // return value default
  __ Push(scratch);
  // isolate
  __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
  __ Push(scratch);
  // holder
  __ Push(holder);

  __ movp(scratch, rsp);
  // Push return address back on stack.
  __ PushReturnAddressFrom(return_address);

  if (!this->is_lazy()) {
    // load context from callee
    __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));
  }

  // Allocate the FunctionCallbackInfo structure in the arguments' space,
  // since it's not controlled by GC.
  const int kApiStackSpace = 3;

  PrepareCallApiFunction(masm, kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  int argc = this->argc();
  __ movp(StackSpaceOperand(0), scratch);
  __ addp(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
  // FunctionCallbackInfo::values_.
  __ movp(StackSpaceOperand(1), scratch);
  // FunctionCallbackInfo::length_.
  __ Set(StackSpaceOperand(2), argc);
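  // (The three exit-frame slots now mirror v8::FunctionCallbackInfo's
  // layout: implicit_args_ points at the FCA array built above, values_ at
  // JS argument 0 -- arguments live at decreasing addresses as the index
  // grows -- and length_ holds argc.)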

#if defined(__MINGW64__) || defined(_WIN64)
  Register arguments_arg = rcx;
  Register callback_arg = rdx;
#else
  Register arguments_arg = rdi;
  Register callback_arg = rsi;
#endif

  // It's okay if api_function_address == callback_arg
  // but not arguments_arg
  DCHECK(!api_function_address.is(arguments_arg));

  // v8::InvocationCallback's argument.
  __ leap(arguments_arg, StackSpaceOperand(0));

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  // Accessor for FunctionCallbackInfo and first js arg.
  StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
                                       ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
      FCA::kArgsLength - FCA::kContextSaveIndex);
  Operand length_operand = StackSpaceOperand(2);
  Operand return_value_operand = args_from_rbp.GetArgumentOperand(
      this->is_store() ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
  int stack_space = 0;
  Operand* stack_space_operand = &length_operand;
  stack_space = argc + FCA::kArgsLength + 1;
  stack_space_operand = nullptr;
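  // (argc is a compile-time constant for this stub, so the amount to pop is
  // passed as the immediate stack_space; the length_operand path just above
  // appears to be a leftover from a dynamic-argc mode and is deliberately
  // overridden with nullptr.)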
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
                           stack_space, stack_space_operand,
                           return_value_operand, &context_restore_operand);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
#if defined(__MINGW64__) || defined(_WIN64)
  Register getter_arg = r8;
  Register accessor_info_arg = rdx;
  Register name_arg = rcx;
#else
  Register getter_arg = rdx;
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif
  Register api_function_address = r8;
  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = rax;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
  // name below the exit frame to make GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
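
  // After the pushes below, with the name handle and return address back on
  // top, the stack is laid out as (indices per the asserts above):
  //
  //   rsp[0]  : return address
  //   rsp[8]  : name handle
  //   rsp[16] : should_throw_on_error  (args_[0])
  //   rsp[24] : holder
  //   rsp[32] : isolate
  //   rsp[40] : return value default
  //   rsp[48] : return value
  //   rsp[56] : data
  //   rsp[64] : receiver               (args_[6])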

  // Insert additional parameters into the stack frame above return address.
  __ PopReturnAddressTo(scratch);
  __ Push(receiver);
  __ Push(FieldOperand(callback, AccessorInfo::kDataOffset));
  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
  __ Push(kScratchRegister);  // return value
  __ Push(kScratchRegister);  // return value default
  __ PushAddress(ExternalReference::isolate_address(isolate()));
  __ Push(holder);
  __ Push(Smi::kZero);  // should_throw_on_error -> false
  __ Push(FieldOperand(callback, AccessorInfo::kNameOffset));
  __ PushReturnAddressFrom(scratch);

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  // Load the address of the v8::PropertyCallbackInfo::args_ array.
  __ leap(scratch, Operand(rsp, 2 * kPointerSize));

  PrepareCallApiFunction(masm, kArgStackSpace);
  // Create a v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  Operand info_object = StackSpaceOperand(0);
  __ movp(info_object, scratch);

  __ leap(name_arg, Operand(scratch, -kPointerSize));
  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ leap(accessor_info_arg, info_object);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // It's okay if api_function_address == getter_arg
  // but not accessor_info_arg or name_arg
  DCHECK(!api_function_address.is(accessor_info_arg));
  DCHECK(!api_function_address.is(name_arg));
  __ movp(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
  __ movp(api_function_address,
          FieldOperand(scratch, Foreign::kForeignAddressOffset));

  // +3 is to skip prologue, return address and name handle.
  Operand return_value_operand(
      rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
                           kStackUnwindSpace, nullptr, return_value_operand,
                           NULL);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64