// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_X64

#include "bootstrapper.h"
#include "code-stubs.h"
#include "regexp-macro-assembler.h"
#include "stub-cache.h"
#include "runtime.h"

namespace v8 {
namespace internal {


void FastNewClosureStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rbx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kNewClosureFromStubFailure)->entry;
}


void ToNumberStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void NumberToStringStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kNumberToString)->entry;
}


void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx, rcx };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kCreateArrayLiteralStubBailout)->entry;
}


void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx, rcx, rdx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry;
}


void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rbx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}


void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}


void LoadFieldStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedArrayCallStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rcx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->continuation_type_ = TAIL_CALL_CONTINUATION;
  descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedCallIC_MissFromStubFailure);
}


void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rcx, rax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure);
}


void TransitionElementsKindStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry;
}


void BinaryOpICStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
}


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // rax -- number of arguments
  // rdi -- function
  // rbx -- type info cell with elements kind
  static Register registers_variable_args[] = { rdi, rbx, rax };
  static Register registers_no_args[] = { rdi, rbx };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_no_args;
  } else {
    // The stack parameter count needs to cover the constructor pointer
    // and a single argument.
    descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
    descriptor->stack_parameter_count_ = rax;
    descriptor->register_param_count_ = 3;
    descriptor->register_params_ = registers_variable_args;
  }
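  // In the variable-args case the stub thus receives the function (rdi),
  // the type info cell (rbx) and the argument count (rax) in registers,
  // and reads the actual stack parameter count out of rax at run time.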

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kArrayConstructor)->entry;
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // rax -- number of arguments
  // rdi -- constructor function
  static Register registers_variable_args[] = { rdi, rax };
  static Register registers_no_args[] = { rdi };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 1;
    descriptor->register_params_ = registers_no_args;
  } else {
    // The stack parameter count needs to cover the constructor pointer
    // and a single argument.
    descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
    descriptor->stack_parameter_count_ = rax;
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_variable_args;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kInternalArrayConstructor)->entry;
}


void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1);
}


void CompareNilICStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(CompareNilIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate));
}


void ToBooleanStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ToBooleanIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
}


void StoreGlobalStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rcx, rax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(StoreIC_MissFromStubFailure);
}


void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx, rcx, rdx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss);
}


void NewStringAddStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kStringAdd)->entry;
}


#define __ ACCESS_MASM(masm)


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
  // Update the static counter each time a new code stub is generated.
  Isolate* isolate = masm->isolate();
  isolate->counters()->code_stubs()->Increment();

  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
  int param_count = descriptor->register_param_count_;
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    ASSERT(descriptor->register_param_count_ == 0 ||
           rax.is(descriptor->register_params_[param_count - 1]));
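    // CallExternalReference below loads the argument count into rax on its
    // way into the CEntry stub, so when rax is used as a parameter register
    // it must be the last one pushed; its value is then read back from the
    // stack by the miss handler.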
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor->register_params_[i]);
    }
    ExternalReference miss = descriptor->miss_handler();
    __ CallExternalReference(miss, descriptor->register_param_count_);
  }

  __ Ret();
}


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize,
              rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movq(rcx, args.GetArgumentOperand(0));

  // Set up the object header.
  __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex);
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
  __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

  // Set up the fixed slots.
  __ Set(rbx, 0);  // Set to NULL.
  __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
  __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi);
  __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);

  // Copy the global object from the previous context.
  __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), rbx);

  // Initialize the rest of the slots to undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ movq(Operand(rax, Context::SlotOffset(i)), rbx);
  }

  // Return and remove the on-stack parameter.
  __ movq(rsi, rax);
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
}


void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [rsp + (1 * kPointerSize)] : function
  // [rsp + (2 * kPointerSize)] : serialized scope info

  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ Allocate(FixedArray::SizeFor(length),
              rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movq(rcx, args.GetArgumentOperand(1));
  // Get the serialized scope info from the stack.
  __ movq(rbx, args.GetArgumentOperand(0));

  // Set up the object header.
  __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
  __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

  // If this block context is nested in the native context we get a smi
  // sentinel instead of a function. The block context should get the
  // canonical empty function of the native context as its closure which
  // we still have to look up.
  Label after_sentinel;
  __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
  if (FLAG_debug_code) {
    __ cmpq(rcx, Immediate(0));
    __ Assert(equal, kExpected0AsASmiSentinel);
  }
  __ movq(rcx, GlobalObjectOperand());
  __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
  __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX));
  __ bind(&after_sentinel);

  // Set up the fixed slots.
  __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx);
  __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi);
  __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx);

  // Copy the global object from the previous context.
  __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
  __ movq(ContextOperand(rax, Context::GLOBAL_OBJECT_INDEX), rbx);

  // Initialize the rest of the slots to the hole value.
  __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
  for (int i = 0; i < slots_; i++) {
    __ movq(ContextOperand(rax, i + Context::MIN_CONTEXT_SLOTS), rbx);
  }

  // Return and remove the on-stack parameter.
  __ movq(rsi, rax);
  __ ret(2 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  __ PushCallerSaved(save_doubles_);
  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
  __ LoadAddress(arg_reg_1,
                 ExternalReference::isolate_address(masm->isolate()));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(masm->isolate()),
      argument_count);
  __ PopCallerSaved(save_doubles_);
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ConvertUndefined {
    CONVERT_UNDEFINED_TO_ZERO,
    BAILOUT_ON_UNDEFINED
  };
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged.
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);
};


void DoubleToIStub::Generate(MacroAssembler* masm) {
    Register input_reg = this->source();
    Register final_result_reg = this->destination();
    ASSERT(is_truncating());

    Label check_negative, process_64_bits, done;

    int double_offset = offset();

    // Account for return address and saved regs if input is rsp.
    if (input_reg.is(rsp)) double_offset += 3 * kPointerSize;
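    // (The three words are the return address plus the scratch1 and save_reg
    // pushes below.)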

    MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
    MemOperand exponent_operand(MemOperand(input_reg,
                                           double_offset + kDoubleSize / 2));

    Register scratch1;
    Register scratch_candidates[3] = { rbx, rdx, rdi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }

    // Since we must use rcx for shifts below, use some other register (rax)
    // to calculate the result if rcx is the requested return register.
    Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
    // Save rcx if it isn't the return register and therefore volatile, or if
    // it is the return register, then save the temp register we use in its
    // stead for the result.
    Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
    __ push(scratch1);
    __ push(save_reg);

    bool stash_exponent_copy = !input_reg.is(rsp);
    __ movl(scratch1, mantissa_operand);
    __ movsd(xmm0, mantissa_operand);
    __ movl(rcx, exponent_operand);
    if (stash_exponent_copy) __ push(rcx);

    __ andl(rcx, Immediate(HeapNumber::kExponentMask));
    __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
    __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
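    // result_reg now holds the unbiased exponent. Doubles with an exponent
    // below kMantissaBits are converted with a 64-bit truncating cvttsd2siq
    // below; for larger exponents only the low 32 bits of the result matter,
    // so the low mantissa word is shifted into place instead.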
    __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
    __ j(below, &process_64_bits);

    // Result is entirely in the lower 32 bits of the mantissa.
    int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
    __ subl(rcx, Immediate(delta));
    __ xorl(result_reg, result_reg);
    __ cmpl(rcx, Immediate(31));
    __ j(above, &done);
    __ shll_cl(scratch1);
    __ jmp(&check_negative);

    __ bind(&process_64_bits);
    __ cvttsd2siq(result_reg, xmm0);
    __ jmp(&done, Label::kNear);

    // If the double was negative, negate the integer result.
    __ bind(&check_negative);
    __ movl(result_reg, scratch1);
    __ negl(result_reg);
    if (stash_exponent_copy) {
        __ cmpl(MemOperand(rsp, 0), Immediate(0));
    } else {
        __ cmpl(exponent_operand, Immediate(0));
    }
    __ cmovl(greater, result_reg, scratch1);

    // Restore registers
    __ bind(&done);
    if (stash_exponent_copy) {
        __ addq(rsp, Immediate(kDoubleSize));
    }
    if (!final_result_reg.is(result_reg)) {
        ASSERT(final_result_reg.is(rcx));
        __ movl(final_result_reg, result_reg);
    }
    __ pop(save_reg);
    __ pop(scratch1);
    __ ret(0);
}


void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  // TAGGED case:
  //   Input:
  //     rsp[8] : argument (should be number).
  //     rsp[0] : return address.
  //   Output:
  //     rax: tagged double result.
  // UNTAGGED case:
  //   Input:
  //     rsp[0] : return address.
  //     xmm1   : untagged double input argument
  //   Output:
  //     xmm1   : untagged double result.

  Label runtime_call;
  Label runtime_call_clear_stack;
  Label skip_cache;
  const bool tagged = (argument_type_ == TAGGED);
  if (tagged) {
    Label input_not_smi, loaded;

    // Test that rax is a number.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movq(rax, args.GetArgumentOperand(0));
    __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear);
    // Input is a smi. Untag and load it onto the FPU stack.
    // Then load the bits of the double into rbx.
    __ SmiToInteger32(rax, rax);
    __ subq(rsp, Immediate(kDoubleSize));
    __ Cvtlsi2sd(xmm1, rax);
    __ movsd(Operand(rsp, 0), xmm1);
    __ movq(rbx, xmm1);
    __ movq(rdx, xmm1);
    __ fld_d(Operand(rsp, 0));
    __ addq(rsp, Immediate(kDoubleSize));
    __ jmp(&loaded, Label::kNear);

    __ bind(&input_not_smi);
    // Check if input is a HeapNumber.
    __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex);
    __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
    __ j(not_equal, &runtime_call);
    // Input is a HeapNumber. Push it on the FPU stack and load its
    // bits into rbx.
    __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
    __ MoveDouble(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
    __ movq(rdx, rbx);

    __ bind(&loaded);
  } else {  // UNTAGGED.
    __ movq(rbx, xmm1);
    __ movq(rdx, xmm1);
  }

  // ST[0] == double value, if TAGGED.
  // rbx = bits of double value.
  // rdx = also bits of double value.
  // Compute hash (h is 32 bits, bits are 64 and the shifts are arithmetic):
  //   h = h0 = bits ^ (bits >> 32);
  //   h ^= h >> 16;
  //   h ^= h >> 8;
  //   h = h & (cacheSize - 1);
  // or h = (h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24)) & (cacheSize - 1)
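  // The code below computes the second form: after the fold rdi holds h0,
  // rdx holds h0 >> 8, rcx holds h0 >> 16 and rax holds h0 >> 24, and the
  // xors are combined into rcx before masking with the cache size.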
  __ sar(rdx, Immediate(32));
  __ xorl(rdx, rbx);
  __ movl(rcx, rdx);
  __ movl(rax, rdx);
  __ movl(rdi, rdx);
  __ sarl(rdx, Immediate(8));
  __ sarl(rcx, Immediate(16));
  __ sarl(rax, Immediate(24));
  __ xorl(rcx, rdx);
  __ xorl(rax, rdi);
  __ xorl(rcx, rax);
  ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
  __ andl(rcx, Immediate(TranscendentalCache::SubCache::kCacheSize - 1));

  // ST[0] == double value.
  // rbx = bits of double value.
  // rcx = TranscendentalCache::hash(double value).
  ExternalReference cache_array =
      ExternalReference::transcendental_cache_array_address(masm->isolate());
  __ Move(rax, cache_array);
  int cache_array_index =
      type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]);
  __ movq(rax, Operand(rax, cache_array_index));
  // rax points to the cache for the type type_.
  // If NULL, the cache hasn't been initialized yet, so go through runtime.
  __ testq(rax, rax);
  __ j(zero, &runtime_call_clear_stack);  // Only clears stack if TAGGED.
#ifdef DEBUG
  // Check that the layout of cache elements matches expectations.
  {  // NOLINT - doesn't like a single brace on a line.
    TranscendentalCache::SubCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0  = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1  = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    // Two uint32_t's and a pointer per element.
    CHECK_EQ(2 * kIntSize + 1 * kPointerSize,
             static_cast<int>(elem2_start - elem_start));
    CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start));
    CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start));
    CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start));
  }
#endif
  // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16].
  __ addl(rcx, rcx);
  __ lea(rcx, Operand(rax, rcx, times_8, 0));
  // Check if cache matches: Double value is stored in uint32_t[2] array.
  Label cache_miss;
  __ cmpq(rbx, Operand(rcx, 0));
  __ j(not_equal, &cache_miss, Label::kNear);
  // Cache hit!
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->transcendental_cache_hit(), 1);
  __ movq(rax, Operand(rcx, 2 * kIntSize));
  if (tagged) {
    __ fstp(0);  // Clear FPU stack.
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();
  }

  __ bind(&cache_miss);
  __ IncrementCounter(counters->transcendental_cache_miss(), 1);
  // Update cache with new value.
  if (tagged) {
    __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack);
  } else {  // UNTAGGED.
    __ AllocateHeapNumber(rax, rdi, &skip_cache);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
    __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
  }
  GenerateOperation(masm, type_);
  __ movq(Operand(rcx, 0), rbx);
  __ movq(Operand(rcx, 2 * kIntSize), rax);
  __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
  if (tagged) {
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();

    // Skip cache and return answer directly, only in untagged case.
    __ bind(&skip_cache);
    __ subq(rsp, Immediate(kDoubleSize));
    __ movsd(Operand(rsp, 0), xmm1);
    __ fld_d(Operand(rsp, 0));
    GenerateOperation(masm, type_);
    __ fstp_d(Operand(rsp, 0));
    __ movsd(xmm1, Operand(rsp, 0));
    __ addq(rsp, Immediate(kDoubleSize));
    // We return the value in xmm1 without adding it to the cache, but
    // we cause a scavenging GC so that future allocations will succeed.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      // Allocate an unused object bigger than a HeapNumber.
      __ Push(Smi::FromInt(2 * kDoubleSize));
      __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
    }
    __ Ret();
  }

  // Call runtime, doing whatever allocation and cleanup is necessary.
  if (tagged) {
    __ bind(&runtime_call_clear_stack);
    __ fstp(0);
    __ bind(&runtime_call);
    __ TailCallExternalReference(
        ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1);
  } else {  // UNTAGGED.
    __ bind(&runtime_call_clear_stack);
    __ bind(&runtime_call);
    __ AllocateHeapNumber(rax, rdi, &skip_cache);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(rax);
      __ CallRuntime(RuntimeFunction(), 1);
    }
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();
  }
}


Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
  switch (type_) {
    // Add more cases when necessary.
    case TranscendentalCache::SIN: return Runtime::kMath_sin;
    case TranscendentalCache::COS: return Runtime::kMath_cos;
    case TranscendentalCache::TAN: return Runtime::kMath_tan;
    case TranscendentalCache::LOG: return Runtime::kMath_log;
    default:
      UNIMPLEMENTED();
      return Runtime::kAbort;
  }
}


void TranscendentalCacheStub::GenerateOperation(
    MacroAssembler* masm, TranscendentalCache::Type type) {
  // Registers:
  // rax: Newly allocated HeapNumber, which must be preserved.
  // rbx: Bits of input double. Must be preserved.
  // rcx: Pointer to cache entry. Must be preserved.
  // st(0): Input double
  Label done;
  if (type == TranscendentalCache::SIN ||
      type == TranscendentalCache::COS ||
      type == TranscendentalCache::TAN) {
    // Both fsin and fcos require arguments in the range +/-2^63 and
    // return NaN for infinities and NaN. They can share all code except
    // the actual fsin/fcos operation.
    Label in_range;
    // If the argument is outside the range -2^63..2^63, fsin/fcos don't
    // work. We must reduce it to the appropriate range.
    __ movq(rdi, rbx);
    // Move exponent and sign bits to low bits.
    __ shr(rdi, Immediate(HeapNumber::kMantissaBits));
    // Remove sign bit.
    __ andl(rdi, Immediate((1 << HeapNumber::kExponentBits) - 1));
    int supported_exponent_limit = (63 + HeapNumber::kExponentBias);
    __ cmpl(rdi, Immediate(supported_exponent_limit));
    __ j(below, &in_range);
    // Check for infinity and NaN. Both return NaN for sin.
    __ cmpl(rdi, Immediate(0x7ff));
    Label non_nan_result;
    __ j(not_equal, &non_nan_result, Label::kNear);
    // Input is +/-Infinity or NaN. Result is NaN.
    __ fstp(0);
    // NaN is represented by 0x7ff8000000000000.
    __ subq(rsp, Immediate(kPointerSize));
    __ movl(Operand(rsp, 4), Immediate(0x7ff80000));
    __ movl(Operand(rsp, 0), Immediate(0x00000000));
    __ fld_d(Operand(rsp, 0));
    __ addq(rsp, Immediate(kPointerSize));
    __ jmp(&done);

    __ bind(&non_nan_result);

    // Use fpmod to restrict argument to the range +/-2*PI.
    __ movq(rdi, rax);  // Save rax before using fnstsw_ax.
    __ fldpi();
    __ fadd(0);
    __ fld(1);
    // FPU Stack: input, 2*pi, input.
    {
      Label no_exceptions;
      __ fwait();
      __ fnstsw_ax();
      // Clear if Illegal Operand or Zero Division exceptions are set.
      __ testl(rax, Immediate(5));  // #IO and #ZD flags of FPU status word.
      __ j(zero, &no_exceptions);
      __ fnclex();
      __ bind(&no_exceptions);
    }

    // Compute st(0) % st(1)
    {
      Label partial_remainder_loop;
      __ bind(&partial_remainder_loop);
      __ fprem1();
      __ fwait();
      __ fnstsw_ax();
      __ testl(rax, Immediate(0x400));  // Check C2 bit of FPU status word.
      // If C2 is set, computation only has partial result. Loop to
      // continue computation.
      __ j(not_zero, &partial_remainder_loop);
    }
    // FPU Stack: input, 2*pi, input % 2*pi
    __ fstp(2);
    // FPU Stack: input % 2*pi, 2*pi
    __ fstp(0);
    // FPU Stack: input % 2*pi
    __ movq(rax, rdi);  // Restore rax, pointer to the new HeapNumber.
    __ bind(&in_range);
    switch (type) {
      case TranscendentalCache::SIN:
        __ fsin();
        break;
      case TranscendentalCache::COS:
        __ fcos();
        break;
      case TranscendentalCache::TAN:
        // FPTAN calculates tangent onto st(0) and pushes 1.0 onto the
        // FP register stack.
        __ fptan();
        __ fstp(0);  // Pop FP register stack.
        break;
      default:
        UNREACHABLE();
    }
    __ bind(&done);
  } else {
    ASSERT(type == TranscendentalCache::LOG);
    __ fldln2();
    __ fxch();
    __ fyl2x();
  }
}


void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ Cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ Cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = rdx;
  const Register base = rax;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movq(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  if (exponent_type_ == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movq(base, args.GetArgumentOperand(0));
    __ movq(exponent, args.GetArgumentOperand(1));
    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiToInteger32(base, base);
    __ Cvtlsi2sd(double_base, base);
    __ bind(&unpack_exponent);

    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type_ == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type_ != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // Detect integer exponents stored as double.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    __ cvttsd2si(exponent, double_exponent);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cmpl(exponent, Immediate(0x80000000u));
    __ j(equal, &call_runtime);

    if (exponent_type_ == ON_STACK) {
      // Detect square root case.  Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead.  We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
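      // (0x3FE0000000000000 is the IEEE-754 bit pattern of 0.5: sign 0,
      // biased exponent 0x3FE, i.e. kExponentBias - 1, mantissa 0.)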
      __ movq(scratch, V8_UINT64_C(0x3FE0000000000000));
      __ movq(double_scratch, scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base.  Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ movq(double_scratch, scratch);
      __ ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to 0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base.  Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ movq(double_scratch, scratch);
      __ ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ subq(rsp, Immediate(kDoubleSize));
    __ movsd(Operand(rsp, 0), double_exponent);
    __ fld_d(Operand(rsp, 0));  // E
    __ movsd(Operand(rsp, 0), double_base);
    __ fld_d(Operand(rsp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0).
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);
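    // FPU stack is now just 2^X: fstp(1) stored st(0) over the rnd(X) slot
    // and popped.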
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(rsp, 0));
    __ movsd(double_result, Operand(rsp, 0));
    __ addq(rsp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ addq(rsp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movq(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);
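
  // Exponentiate by squaring: each loop iteration shifts out the next
  // exponent bit, squares the base copy, and multiplies it into the result
  // when the shifted-out bit was 1. E.g. for |scratch| == 13 (0b1101) the
  // result becomes b * b^4 * b^8 == b^13.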
  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0.  This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // input was a smi.  We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  Counters* counters = masm->isolate()->counters();
  if (exponent_type_ == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in rax.
    __ bind(&done);
    __ AllocateHeapNumber(rax, rcx, &call_runtime);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    // Move base to the correct argument register.  Exponent is already in
    // xmm1.
    __ movsd(xmm0, double_base);
    ASSERT(double_exponent.is(xmm1));
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(2);
      __ CallCFunction(
          ExternalReference::power_double_double_function(masm->isolate()), 2);
    }
    // Return value is in xmm0.
    __ movsd(double_result, xmm0);

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(0);
  }
}


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver;
  if (kind() == Code::KEYED_LOAD_IC) {
    // ----------- S t a t e -------------
    //  -- rax    : key
    //  -- rdx    : receiver
    //  -- rsp[0] : return address
    // -----------------------------------
    __ Cmp(rax, masm->isolate()->factory()->prototype_string());
    __ j(not_equal, &miss);
    receiver = rdx;
  } else {
    ASSERT(kind() == Code::LOAD_IC);
    // ----------- S t a t e -------------
    //  -- rax    : receiver
    //  -- rcx    : name
    //  -- rsp[0] : return address
    // -----------------------------------
    receiver = rax;
  }

  StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8, r9, &miss);
  __ bind(&miss);
  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}


void StringLengthStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver;
  if (kind() == Code::KEYED_LOAD_IC) {
    // ----------- S t a t e -------------
    //  -- rax    : key
    //  -- rdx    : receiver
    //  -- rsp[0] : return address
    // -----------------------------------
    __ Cmp(rax, masm->isolate()->factory()->length_string());
    __ j(not_equal, &miss);
    receiver = rdx;
  } else {
    ASSERT(kind() == Code::LOAD_IC);
    // ----------- S t a t e -------------
    //  -- rax    : receiver
    //  -- rcx    : name
    //  -- rsp[0] : return address
    // -----------------------------------
    receiver = rax;
  }

  StubCompiler::GenerateLoadStringLength(masm, receiver, r8, r9, &miss);
  __ bind(&miss);
  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}


void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  //
  // This accepts as a receiver anything JSArray::SetElementsLength accepts
  // (currently anything except for external arrays which means anything with
  // elements of FixedArray type).  Value must be a number, but only smis are
  // accepted as the most common case.

  Label miss;

  Register receiver = rdx;
  Register value = rax;
  Register scratch = rbx;
  if (kind() == Code::KEYED_STORE_IC) {
    __ Cmp(rcx, masm->isolate()->factory()->length_string());
    __ j(not_equal, &miss);
  }

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that elements are FixedArray.
  // We rely on StoreIC_ArrayLength below to deal with all types of
  // fast elements (including COW).
  __ movq(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
  __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that the array has fast properties, otherwise the length
  // property might have been redefined.
  __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
  __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &miss);

  // Check that value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ PopReturnAddressTo(scratch);
  __ push(receiver);
  __ push(value);
  __ PushReturnAddressFrom(scratch);
   1305 
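          // Tail call the IC utility with two stack arguments (receiver and
          // value) and a result size of one.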
   1306   ExternalReference ref =
   1307       ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate());
   1308   __ TailCallExternalReference(ref, 2, 1);
   1309 
   1310   __ bind(&miss);
   1311 
   1312   StubCompiler::TailCallBuiltin(
   1313       masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
   1314 }
   1315 
   1316 
   1317 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
   1318   // The key is in rdx and the parameter count is in rax.
   1319 
   1320   // Check that the key is a smi.
   1321   Label slow;
   1322   __ JumpIfNotSmi(rdx, &slow);
   1323 
   1324   // Check if the calling frame is an arguments adaptor frame.  We look at the
   1325   // context offset, and if the frame is not a regular one, then we find a
   1326   // Smi instead of the context.  We can't use SmiCompare here, because that
   1327   // only works for comparing two smis.
   1328   Label adaptor;
   1329   __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   1330   __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
   1331          Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   1332   __ j(equal, &adaptor);
   1333 
   1334   // Check the index against the formal parameter count limit passed in
   1335   // through register rax. Use unsigned comparison to get the negative
   1336   // check for free.
   1337   __ cmpq(rdx, rax);
   1338   __ j(above_equal, &slow);
   1339 
   1340   // Read the argument from the stack and return it.
   1341   __ SmiSub(rax, rax, rdx);
   1342   __ SmiToInteger32(rax, rax);
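          // rax now holds parameter_count - key. Using that as the argument count
          // makes GetArgumentOperand(0) below address exactly the stack slot of
          // the requested argument.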
   1343   StackArgumentsAccessor args(rbp, rax, ARGUMENTS_DONT_CONTAIN_RECEIVER);
   1344   __ movq(rax, args.GetArgumentOperand(0));
   1345   __ Ret();
   1346 
   1347   // Arguments adaptor case: Check the index against the actual argument
   1348   // count found in the arguments adaptor frame. Use unsigned comparison
   1349   // to get the negative check for free.
   1350   __ bind(&adaptor);
   1351   __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
   1352   __ cmpq(rdx, rcx);
   1353   __ j(above_equal, &slow);
   1354 
   1355   // Read the argument from the stack and return it.
   1356   __ SmiSub(rcx, rcx, rdx);
   1357   __ SmiToInteger32(rcx, rcx);
   1358   StackArgumentsAccessor adaptor_args(rbx, rcx,
   1359                                       ARGUMENTS_DONT_CONTAIN_RECEIVER);
   1360   __ movq(rax, adaptor_args.GetArgumentOperand(0));
   1361   __ Ret();
   1362 
   1363   // Slow-case: Handle non-smi or out-of-bounds access to arguments
   1364   // by calling the runtime system.
   1365   __ bind(&slow);
   1366   __ PopReturnAddressTo(rbx);
   1367   __ push(rdx);
   1368   __ PushReturnAddressFrom(rbx);
   1369   __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
   1370 }
   1371 
   1372 
   1373 void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
   1374   // Stack layout:
   1375   //  rsp[0]  : return address
   1376   //  rsp[8]  : number of parameters (tagged)
   1377   //  rsp[16] : receiver displacement
   1378   //  rsp[24] : function
   1379   // Registers used over the whole function:
   1380   //  rbx: the mapped parameter count (untagged)
   1381   //  rax: the allocated object (tagged).
   1382 
   1383   Factory* factory = masm->isolate()->factory();
   1384 
   1385   StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
   1386   __ SmiToInteger64(rbx, args.GetArgumentOperand(2));
   1387   // rbx = parameter count (untagged)
   1388 
   1389   // Check if the calling frame is an arguments adaptor frame.
   1390   Label runtime;
   1391   Label adaptor_frame, try_allocate;
   1392   __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   1393   __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
   1394   __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   1395   __ j(equal, &adaptor_frame);
   1396 
   1397   // No adaptor, parameter count = argument count.
   1398   __ movq(rcx, rbx);
   1399   __ jmp(&try_allocate, Label::kNear);
   1400 
   1401   // We have an adaptor frame. Patch the parameters pointer.
   1402   __ bind(&adaptor_frame);
   1403   __ SmiToInteger64(rcx,
   1404                     Operand(rdx,
   1405                             ArgumentsAdaptorFrameConstants::kLengthOffset));
   1406   __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
   1407                       StandardFrameConstants::kCallerSPOffset));
   1408   __ movq(args.GetArgumentOperand(1), rdx);
   1409 
   1410   // rbx = parameter count (untagged)
   1411   // rcx = argument count (untagged)
   1412   // Compute the mapped parameter count = min(rbx, rcx) in rbx.
   1413   __ cmpq(rbx, rcx);
   1414   __ j(less_equal, &try_allocate, Label::kNear);
   1415   __ movq(rbx, rcx);
   1416 
   1417   __ bind(&try_allocate);
   1418 
   1419   // Compute the sizes of backing store, parameter map, and arguments object.
   1420   // 1. Parameter map, has 2 extra words containing context and backing store.
   1421   const int kParameterMapHeaderSize =
   1422       FixedArray::kHeaderSize + 2 * kPointerSize;
   1423   Label no_parameter_map;
   1424   __ xor_(r8, r8);
   1425   __ testq(rbx, rbx);
   1426   __ j(zero, &no_parameter_map, Label::kNear);
   1427   __ lea(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
   1428   __ bind(&no_parameter_map);
   1429 
   1430   // 2. Backing store.
   1431   __ lea(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize));
   1432 
   1433   // 3. Arguments object.
   1434   __ addq(r8, Immediate(Heap::kArgumentsObjectSize));
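          // r8 now holds the combined allocation size:
          //   r8 = (rbx == 0 ? 0 : kParameterMapHeaderSize + rbx * kPointerSize)
          //        + FixedArray::kHeaderSize + rcx * kPointerSize
          //        + Heap::kArgumentsObjectSize.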
   1435 
   1436   // Do the allocation of all three objects in one go.
   1437   __ Allocate(r8, rax, rdx, rdi, &runtime, TAG_OBJECT);
   1438 
   1439   // rax = address of new object(s) (tagged)
   1440   // rcx = argument count (untagged)
   1441   // Get the arguments boilerplate from the current native context into rdi.
   1442   Label has_mapped_parameters, copy;
   1443   __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   1444   __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
   1445   __ testq(rbx, rbx);
   1446   __ j(not_zero, &has_mapped_parameters, Label::kNear);
   1447 
   1448   const int kIndex = Context::ARGUMENTS_BOILERPLATE_INDEX;
   1449   __ movq(rdi, Operand(rdi, Context::SlotOffset(kIndex)));
   1450   __ jmp(&copy, Label::kNear);
   1451 
   1452   const int kAliasedIndex = Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX;
   1453   __ bind(&has_mapped_parameters);
   1454   __ movq(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex)));
   1455   __ bind(&copy);
   1456 
   1457   // rax = address of new object (tagged)
   1458   // rbx = mapped parameter count (untagged)
   1459   // rcx = argument count (untagged)
   1460   // rdi = address of boilerplate object (tagged)
   1461   // Copy the JS object part.
   1462   for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
   1463     __ movq(rdx, FieldOperand(rdi, i));
   1464     __ movq(FieldOperand(rax, i), rdx);
   1465   }
   1466 
   1467   // Set up the callee in-object property.
   1468   STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
   1469   __ movq(rdx, args.GetArgumentOperand(0));
   1470   __ movq(FieldOperand(rax, JSObject::kHeaderSize +
   1471                        Heap::kArgumentsCalleeIndex * kPointerSize),
   1472           rdx);
   1473 
   1474   // Use the length (smi tagged) and set that as an in-object property too.
   1475   // Note: rcx is tagged from here on.
   1476   STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
   1477   __ Integer32ToSmi(rcx, rcx);
   1478   __ movq(FieldOperand(rax, JSObject::kHeaderSize +
   1479                        Heap::kArgumentsLengthIndex * kPointerSize),
   1480           rcx);
   1481 
   1482   // Set up the elements pointer in the allocated arguments object.
   1483   // If we allocated a parameter map, rdi will point there, otherwise to the
   1484   // backing store.
   1485   __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
   1486   __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
   1487 
   1488   // rax = address of new object (tagged)
   1489   // rbx = mapped parameter count (untagged)
   1490   // rcx = argument count (tagged)
   1491   // rdi = address of parameter map or backing store (tagged)
   1492 
   1493   // Initialize parameter map. If there are no mapped arguments, we're done.
   1494   Label skip_parameter_map;
   1495   __ testq(rbx, rbx);
   1496   __ j(zero, &skip_parameter_map);
   1497 
   1498   __ LoadRoot(kScratchRegister, Heap::kNonStrictArgumentsElementsMapRootIndex);
   1499   // rbx contains the untagged mapped parameter count. Add 2 and tag to write.
   1500   __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
   1501   __ Integer64PlusConstantToSmi(r9, rbx, 2);
   1502   __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
   1503   __ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
   1504   __ lea(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
   1505   __ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);
   1506 
   1507   // Copy the parameter slots and the holes in the arguments.
   1508   // We need to fill in mapped_parameter_count slots. They index the context,
   1509   // where parameters are stored in reverse order, at
   1510   //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
   1511   // The mapped parameters thus need to get indices
   1512   //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
   1513   //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
   1514   // We loop from right to left.
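          // For example, with parameter_count == 4 and mapped_parameter_count == 2,
          // the written slots get the indices MIN_CONTEXT_SLOTS + 3 and
          // MIN_CONTEXT_SLOTS + 2.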
   1515   Label parameters_loop, parameters_test;
   1516 
   1517   // Load tagged parameter count into r9.
   1518   __ Integer32ToSmi(r9, rbx);
   1519   __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
   1520   __ addq(r8, args.GetArgumentOperand(2));
   1521   __ subq(r8, r9);
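          // r8 = Smi(MIN_CONTEXT_SLOTS + parameter_count - mapped_parameter_count),
          // the lowest context index that receives a mapping; the loop below
          // increments it while walking the parameters from right to left.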
   1522   __ Move(r11, factory->the_hole_value());
   1523   __ movq(rdx, rdi);
   1524   __ lea(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
   1525   // r9 = loop variable (tagged)
   1526   // r8 = mapping index (tagged)
   1527   // r11 = the hole value
   1528   // rdx = address of parameter map (tagged)
   1529   // rdi = address of backing store (tagged)
   1530   __ jmp(&parameters_test, Label::kNear);
   1531 
   1532   __ bind(&parameters_loop);
   1533   __ SmiSubConstant(r9, r9, Smi::FromInt(1));
   1534   __ SmiToInteger64(kScratchRegister, r9);
   1535   __ movq(FieldOperand(rdx, kScratchRegister,
   1536                        times_pointer_size,
   1537                        kParameterMapHeaderSize),
   1538           r8);
   1539   __ movq(FieldOperand(rdi, kScratchRegister,
   1540                        times_pointer_size,
   1541                        FixedArray::kHeaderSize),
   1542           r11);
   1543   __ SmiAddConstant(r8, r8, Smi::FromInt(1));
   1544   __ bind(&parameters_test);
   1545   __ SmiTest(r9);
   1546   __ j(not_zero, &parameters_loop, Label::kNear);
   1547 
   1548   __ bind(&skip_parameter_map);
   1549 
   1550   // rcx = argument count (tagged)
   1551   // rdi = address of backing store (tagged)
   1552   // Copy arguments header and remaining slots (if there are any).
   1553   __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
   1554           factory->fixed_array_map());
   1555   __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
   1556 
   1557   Label arguments_loop, arguments_test;
   1558   __ movq(r8, rbx);
   1559   __ movq(rdx, args.GetArgumentOperand(1));
   1560   // Untag rcx for the loop below.
   1561   __ SmiToInteger64(rcx, rcx);
   1562   __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0));
   1563   __ subq(rdx, kScratchRegister);
   1564   __ jmp(&arguments_test, Label::kNear);
   1565 
   1566   __ bind(&arguments_loop);
   1567   __ subq(rdx, Immediate(kPointerSize));
   1568   __ movq(r9, Operand(rdx, 0));
   1569   __ movq(FieldOperand(rdi, r8,
   1570                        times_pointer_size,
   1571                        FixedArray::kHeaderSize),
   1572           r9);
   1573   __ addq(r8, Immediate(1));
   1574 
   1575   __ bind(&arguments_test);
   1576   __ cmpq(r8, rcx);
   1577   __ j(less, &arguments_loop, Label::kNear);
   1578 
   1579   // Return and remove the on-stack parameters.
   1580   __ ret(3 * kPointerSize);
   1581 
   1582   // Do the runtime call to allocate the arguments object.
   1583   // rcx = argument count (untagged)
   1584   __ bind(&runtime);
   1585   __ Integer32ToSmi(rcx, rcx);
   1586   __ movq(args.GetArgumentOperand(2), rcx);  // Patch argument count.
   1587   __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
   1588 }
   1589 
   1590 
   1591 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
   1592   // rsp[0]  : return address
   1593   // rsp[8]  : number of parameters
   1594   // rsp[16] : receiver displacement
   1595   // rsp[24] : function
   1596 
   1597   // Check if the calling frame is an arguments adaptor frame.
   1598   Label runtime;
   1599   __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   1600   __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
   1601   __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   1602   __ j(not_equal, &runtime);
   1603 
   1604   // Patch the arguments.length and the parameters pointer.
   1605   StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
   1606   __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
   1607   __ movq(args.GetArgumentOperand(2), rcx);
   1608   __ SmiToInteger64(rcx, rcx);
   1609   __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
   1610               StandardFrameConstants::kCallerSPOffset));
   1611   __ movq(args.GetArgumentOperand(1), rdx);
   1612 
   1613   __ bind(&runtime);
   1614   __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
   1615 }
   1616 
   1617 
   1618 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
   1619   // rsp[0]  : return address
   1620   // rsp[8]  : number of parameters
   1621   // rsp[16] : receiver displacement
   1622   // rsp[24] : function
   1623 
   1624   // Check if the calling frame is an arguments adaptor frame.
   1625   Label adaptor_frame, try_allocate, runtime;
   1626   __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   1627   __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
   1628   __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   1629   __ j(equal, &adaptor_frame);
   1630 
   1631   // Get the length from the frame.
   1632   StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
   1633   __ movq(rcx, args.GetArgumentOperand(2));
   1634   __ SmiToInteger64(rcx, rcx);
   1635   __ jmp(&try_allocate);
   1636 
   1637   // Patch the arguments.length and the parameters pointer.
   1638   __ bind(&adaptor_frame);
   1639   __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
   1640   __ movq(args.GetArgumentOperand(2), rcx);
   1641   __ SmiToInteger64(rcx, rcx);
   1642   __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
   1643                       StandardFrameConstants::kCallerSPOffset));
   1644   __ movq(args.GetArgumentOperand(1), rdx);
   1645 
   1646   // Try the new space allocation. Start out with computing the size of
   1647   // the arguments object and the elements array.
   1648   Label add_arguments_object;
   1649   __ bind(&try_allocate);
   1650   __ testq(rcx, rcx);
   1651   __ j(zero, &add_arguments_object, Label::kNear);
   1652   __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
   1653   __ bind(&add_arguments_object);
   1654   __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict));
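          // rcx now holds the total allocation size: the arguments object plus,
          // if there are any arguments, a FixedArray header and one pointer per
          // argument.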
   1655 
   1656   // Do the allocation of both objects in one go.
   1657   __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
   1658 
   1659   // Get the arguments boilerplate from the current native context.
   1660   __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   1661   __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
   1662   const int offset =
   1663       Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
   1664   __ movq(rdi, Operand(rdi, offset));
   1665 
   1666   // Copy the JS object part.
   1667   for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
   1668     __ movq(rbx, FieldOperand(rdi, i));
   1669     __ movq(FieldOperand(rax, i), rbx);
   1670   }
   1671 
   1672   // Get the length (smi tagged) and set that as an in-object property too.
   1673   STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
   1674   __ movq(rcx, args.GetArgumentOperand(2));
   1675   __ movq(FieldOperand(rax, JSObject::kHeaderSize +
   1676                        Heap::kArgumentsLengthIndex * kPointerSize),
   1677           rcx);
   1678 
   1679   // If there are no actual arguments, we're done.
   1680   Label done;
   1681   __ testq(rcx, rcx);
   1682   __ j(zero, &done);
   1683 
   1684   // Get the parameters pointer from the stack.
   1685   __ movq(rdx, args.GetArgumentOperand(1));
   1686 
   1687   // Set up the elements pointer in the allocated arguments object and
   1688   // initialize the header in the elements fixed array.
   1689   __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict));
   1690   __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
   1691   __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
   1692   __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
   1693 
   1694 
   1695   __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
   1696   // Untag the length for the loop below.
   1697   __ SmiToInteger64(rcx, rcx);
   1698 
   1699   // Copy the fixed array slots.
   1700   Label loop;
   1701   __ bind(&loop);
   1702   __ movq(rbx, Operand(rdx, -1 * kPointerSize));  // Skip receiver.
   1703   __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), rbx);
   1704   __ addq(rdi, Immediate(kPointerSize));
   1705   __ subq(rdx, Immediate(kPointerSize));
   1706   __ decq(rcx);
   1707   __ j(not_zero, &loop);
   1708 
   1709   // Return and remove the on-stack parameters.
   1710   __ bind(&done);
   1711   __ ret(3 * kPointerSize);
   1712 
   1713   // Do the runtime call to allocate the arguments object.
   1714   __ bind(&runtime);
   1715   __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
   1716 }
   1717 
   1718 
   1719 void RegExpExecStub::Generate(MacroAssembler* masm) {
   1720   // Just jump directly to the runtime if native RegExp is not selected at
   1721   // compile time, or if the regexp entry in generated code has been turned
   1722   // off by a runtime switch or at compilation.
   1723 #ifdef V8_INTERPRETED_REGEXP
   1724   __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
   1725 #else  // V8_INTERPRETED_REGEXP
   1726 
   1727   // Stack frame on entry.
   1728   //  rsp[0]  : return address
   1729   //  rsp[8]  : last_match_info (expected JSArray)
   1730   //  rsp[16] : previous index
   1731   //  rsp[24] : subject string
   1732   //  rsp[32] : JSRegExp object
   1733 
   1734   enum RegExpExecStubArgumentIndices {
   1735     JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
   1736     SUBJECT_STRING_ARGUMENT_INDEX,
   1737     PREVIOUS_INDEX_ARGUMENT_INDEX,
   1738     LAST_MATCH_INFO_ARGUMENT_INDEX,
   1739     REG_EXP_EXEC_ARGUMENT_COUNT
   1740   };
   1741 
   1742   StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
   1743                               ARGUMENTS_DONT_CONTAIN_RECEIVER);
   1744   Label runtime;
   1745   // Ensure that a RegExp stack is allocated.
   1746   Isolate* isolate = masm->isolate();
   1747   ExternalReference address_of_regexp_stack_memory_address =
   1748       ExternalReference::address_of_regexp_stack_memory_address(isolate);
   1749   ExternalReference address_of_regexp_stack_memory_size =
   1750       ExternalReference::address_of_regexp_stack_memory_size(isolate);
   1751   __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
   1752   __ testq(kScratchRegister, kScratchRegister);
   1753   __ j(zero, &runtime);
   1754 
   1755   // Check that the first argument is a JSRegExp object.
   1756   __ movq(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
   1757   __ JumpIfSmi(rax, &runtime);
   1758   __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
   1759   __ j(not_equal, &runtime);
   1760 
   1761   // Check that the RegExp has been compiled (data contains a fixed array).
   1762   __ movq(rax, FieldOperand(rax, JSRegExp::kDataOffset));
   1763   if (FLAG_debug_code) {
   1764     Condition is_smi = masm->CheckSmi(rax);
   1765     __ Check(NegateCondition(is_smi),
   1766         kUnexpectedTypeForRegExpDataFixedArrayExpected);
   1767     __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
   1768     __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
   1769   }
   1770 
   1771   // rax: RegExp data (FixedArray)
   1772   // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
   1773   __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
   1774   __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
   1775   __ j(not_equal, &runtime);
   1776 
   1777   // rax: RegExp data (FixedArray)
   1778   // Check that the number of captures fit in the static offsets vector buffer.
   1779   __ SmiToInteger32(rdx,
   1780                     FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
   1781   // Check (number_of_captures + 1) * 2 <= offsets vector size
   1782   // Or              number_of_captures <= offsets vector size / 2 - 1
   1783   STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
   1784   __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
   1785   __ j(above, &runtime);
   1786 
   1787   // Reset offset for possibly sliced string.
   1788   __ Set(r14, 0);
   1789   __ movq(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
   1790   __ JumpIfSmi(rdi, &runtime);
   1791   __ movq(r15, rdi);  // Make a copy of the original subject string.
   1792   __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
   1793   __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
   1794   // rax: RegExp data (FixedArray)
   1795   // rdi: subject string
   1796   // r15: subject string
   1797   // Handle subject string according to its encoding and representation:
   1798   // (1) Sequential two byte?  If yes, go to (9).
   1799   // (2) Sequential one byte?  If yes, go to (6).
   1800   // (3) Anything but sequential or cons?  If yes, go to (7).
   1801   // (4) Cons string.  If the string is flat, replace subject with first string.
   1802   //     Otherwise bailout.
   1803   // (5a) Is subject sequential two byte?  If yes, go to (9).
   1804   // (5b) Is subject external?  If yes, go to (8).
   1805   // (6) One byte sequential.  Load regexp code for one byte.
   1806   // (E) Carry on.
   1807   /// [...]
   1808 
   1809   // Deferred code at the end of the stub:
   1810   // (7) Not a long external string?  If yes, go to (10).
   1811   // (8) External string.  Make it, offset-wise, look like a sequential string.
   1812   // (8a) Is the external string one byte?  If yes, go to (6).
   1813   // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
   1814   // (10) Short external string or not a string?  If yes, bail out to runtime.
   1815   // (11) Sliced string.  Replace subject with parent. Go to (5a).
   1816 
   1817   Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
   1818         external_string /* 8 */, check_underlying /* 5a */,
   1819         not_seq_nor_cons /* 7 */, check_code /* E */,
   1820         not_long_external /* 10 */;
   1821 
   1822   // (1) Sequential two byte?  If yes, go to (9).
   1823   __ andb(rbx, Immediate(kIsNotStringMask |
   1824                          kStringRepresentationMask |
   1825                          kStringEncodingMask |
   1826                          kShortExternalStringMask));
   1827   STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
   1828   __ j(zero, &seq_two_byte_string);  // Go to (9).
   1829 
   1830   // (2) Sequential one byte?  If yes, go to (6).
   1831   // Any other sequential string must be one byte.
   1832   __ andb(rbx, Immediate(kIsNotStringMask |
   1833                          kStringRepresentationMask |
   1834                          kShortExternalStringMask));
   1835   __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (6).
   1836 
   1837   // (3) Anything but sequential or cons?  If yes, go to (7).
   1838   // We check whether the subject string is a cons, since sequential strings
   1839   // have already been covered.
   1840   STATIC_ASSERT(kConsStringTag < kExternalStringTag);
   1841   STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
   1842   STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
   1843   STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
   1844   __ cmpq(rbx, Immediate(kExternalStringTag));
   1845   __ j(greater_equal, &not_seq_nor_cons);  // Go to (7).
   1846 
   1847   // (4) Cons string.  Check that it's flat.
   1848   // Replace subject with first string and reload instance type.
   1849   __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
   1850                  Heap::kempty_stringRootIndex);
   1851   __ j(not_equal, &runtime);
   1852   __ movq(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
   1853   __ bind(&check_underlying);
   1854   __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
   1855   __ movq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
   1856 
   1857   // (5a) Is subject sequential two byte?  If yes, go to (9).
   1858   __ testb(rbx, Immediate(kStringRepresentationMask | kStringEncodingMask));
   1859   STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
   1860   __ j(zero, &seq_two_byte_string);  // Go to (9).
   1861   // (5b) Is subject external?  If yes, go to (8).
   1862   __ testb(rbx, Immediate(kStringRepresentationMask));
   1863   // The underlying external string is never a short external string.
   1864   STATIC_CHECK(ExternalString::kMaxShortLength < ConsString::kMinLength);
   1865   STATIC_CHECK(ExternalString::kMaxShortLength < SlicedString::kMinLength);
   1866   __ j(not_zero, &external_string);  // Go to (8)
   1867 
   1868   // (6) One byte sequential.  Load regexp code for one byte.
   1869   __ bind(&seq_one_byte_string);
   1870   // rax: RegExp data (FixedArray)
   1871   __ movq(r11, FieldOperand(rax, JSRegExp::kDataAsciiCodeOffset));
   1872   __ Set(rcx, 1);  // Type is one byte.
   1873 
   1874   // (E) Carry on.  String handling is done.
   1875   __ bind(&check_code);
   1876   // r11: irregexp code
   1877   // Check that the irregexp code has been generated for the actual string
   1878   // encoding. If it has, the field contains a code object; otherwise it
   1879   // contains a smi (code flushing support).
   1880   __ JumpIfSmi(r11, &runtime);
   1881 
   1882   // rdi: sequential subject string (or look-alike, external string)
   1883   // r15: original subject string
   1884   // rcx: encoding of subject string (1 if ASCII, 0 if two_byte);
   1885   // r11: code
   1886   // Load used arguments before starting to push arguments for call to native
   1887   // RegExp code to avoid handling changing stack height.
   1888   // We have to use r15 instead of rdi to load the length, because rdi might
   1889   // only have been made to look like a sequential string when it is actually
   1890   // an external string.
   1891   __ movq(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
   1892   __ JumpIfNotSmi(rbx, &runtime);
   1893   __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
   1894   __ j(above_equal, &runtime);
   1895   __ SmiToInteger64(rbx, rbx);
   1896 
   1897   // rdi: subject string
   1898   // rbx: previous index
   1899   // rcx: encoding of subject string (1 if ASCII 0 if two_byte);
   1900   // r11: code
   1901   // All checks done. Now push arguments for native regexp code.
   1902   Counters* counters = masm->isolate()->counters();
   1903   __ IncrementCounter(counters->regexp_entry_native(), 1);
   1904 
   1905   // Isolates: note we add an additional parameter here (isolate pointer).
   1906   static const int kRegExpExecuteArguments = 9;
   1907   int argument_slots_on_stack =
   1908       masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
   1909   __ EnterApiExitFrame(argument_slots_on_stack);
   1910 
   1911   // Argument 9: Pass current isolate address.
   1912   __ LoadAddress(kScratchRegister,
   1913                  ExternalReference::isolate_address(masm->isolate()));
   1914   __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
   1915           kScratchRegister);
   1916 
   1917   // Argument 8: Indicate that this is a direct call from JavaScript.
   1918   __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize),
   1919           Immediate(1));
   1920 
   1921   // Argument 7: Start (high end) of backtracking stack memory area.
   1922   __ Move(kScratchRegister, address_of_regexp_stack_memory_address);
   1923   __ movq(r9, Operand(kScratchRegister, 0));
   1924   __ Move(kScratchRegister, address_of_regexp_stack_memory_size);
   1925   __ addq(r9, Operand(kScratchRegister, 0));
   1926   __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r9);
   1927 
   1928   // Argument 6: Set the number of capture registers to zero to force global
   1929   // regexps to behave as non-global.  This does not affect non-global regexps.
   1930   // Argument 6 is passed in r9 on Linux and on the stack on Windows.
   1931 #ifdef _WIN64
   1932   __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize),
   1933           Immediate(0));
   1934 #else
   1935   __ Set(r9, 0);
   1936 #endif
   1937 
   1938   // Argument 5: static offsets vector buffer.
   1939   __ LoadAddress(r8,
   1940                  ExternalReference::address_of_static_offsets_vector(isolate));
   1941   // Argument 5 passed in r8 on Linux and on the stack on Windows.
   1942 #ifdef _WIN64
   1943   __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kPointerSize), r8);
   1944 #endif
   1945 
   1946   // rdi: subject string
   1947   // rbx: previous index
   1948   // rcx: encoding of subject string (1 if ASCII 0 if two_byte);
   1949   // r11: code
   1950   // r14: slice offset
   1951   // r15: original subject string
   1952 
   1953   // Argument 2: Previous index.
   1954   __ movq(arg_reg_2, rbx);
   1955 
   1956   // Argument 4: End of string data
   1957   // Argument 3: Start of string data
   1958   Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
   1959   // Prepare start and end index of the input.
   1960   // Load the length from the original sliced string if that is the case.
   1961   __ addq(rbx, r14);
   1962   __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset));
   1963   __ addq(r14, arg_reg_3);  // Using arg3 as scratch.
   1964 
   1965   // rbx: start index of the input
   1966   // r14: end index of the input
   1967   // r15: original subject string
   1968   __ testb(rcx, rcx);  // Last use of rcx as encoding of subject string.
   1969   __ j(zero, &setup_two_byte, Label::kNear);
   1970   __ lea(arg_reg_4,
   1971          FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
   1972   __ lea(arg_reg_3,
   1973          FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
   1974   __ jmp(&setup_rest, Label::kNear);
   1975   __ bind(&setup_two_byte);
   1976   __ lea(arg_reg_4,
   1977          FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
   1978   __ lea(arg_reg_3,
   1979          FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
   1980   __ bind(&setup_rest);
   1981 
   1982   // Argument 1: Original subject string.
   1983   // The original subject string was saved in r15 above, because rdi may by
   1984   // now merely look like a sequential string (for example, the adjusted data
   1985   // pointer of an external string), so r15 rather than rdi is passed as the
   1986   // subject.
   1987   __ movq(arg_reg_1, r15);
   1988 
   1989   // Locate the code entry and call it.
   1990   __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
   1991   __ call(r11);
   1992 
   1993   __ LeaveApiExitFrame(true);
   1994 
   1995   // Check the result.
   1996   Label success;
   1997   Label exception;
   1998   __ cmpl(rax, Immediate(1));
   1999   // We expect exactly one result since we force the called regexp to behave
   2000   // as non-global.
   2001   __ j(equal, &success, Label::kNear);
   2002   __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
   2003   __ j(equal, &exception);
   2004   __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
   2005   // If none of the above, it can only be RETRY.
   2006   // Handle that in the runtime system.
   2007   __ j(not_equal, &runtime);
   2008 
   2009   // For failure return null.
   2010   __ LoadRoot(rax, Heap::kNullValueRootIndex);
   2011   __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
   2012 
   2013   // Load RegExp data.
   2014   __ bind(&success);
   2015   __ movq(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
   2016   __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
   2017   __ SmiToInteger32(rax,
   2018                     FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
   2019   // Calculate number of capture registers (number_of_captures + 1) * 2.
   2020   __ leal(rdx, Operand(rax, rax, times_1, 2));
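          // The lea computes rax + rax * 1 + 2, i.e. (number_of_captures + 1) * 2,
          // in a single instruction.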
   2021 
   2022   // rdx: Number of capture registers
   2023   // Check that the fourth argument is a JSArray object.
   2024   __ movq(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
   2025   __ JumpIfSmi(r15, &runtime);
   2026   __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister);
   2027   __ j(not_equal, &runtime);
   2028   // Check that the JSArray is in fast case.
   2029   __ movq(rbx, FieldOperand(r15, JSArray::kElementsOffset));
   2030   __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset));
   2031   __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
   2032   __ j(not_equal, &runtime);
   2033   // Check that the last match info has space for the capture registers and the
   2034   // additional information. Ensure no overflow in add.
   2035   STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
   2036   __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
   2037   __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead));
   2038   __ cmpl(rdx, rax);
   2039   __ j(greater, &runtime);
   2040 
   2041   // rbx: last_match_info backing store (FixedArray)
   2042   // rdx: number of capture registers
   2043   // Store the capture count.
   2044   __ Integer32ToSmi(kScratchRegister, rdx);
   2045   __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
   2046           kScratchRegister);
   2047   // Store last subject and last input.
   2048   __ movq(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
   2049   __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
   2050   __ movq(rcx, rax);
   2051   __ RecordWriteField(rbx,
   2052                       RegExpImpl::kLastSubjectOffset,
   2053                       rax,
   2054                       rdi,
   2055                       kDontSaveFPRegs);
   2056   __ movq(rax, rcx);
   2057   __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
   2058   __ RecordWriteField(rbx,
   2059                       RegExpImpl::kLastInputOffset,
   2060                       rax,
   2061                       rdi,
   2062                       kDontSaveFPRegs);
   2063 
   2064   // Get the static offsets vector filled by the native regexp code.
   2065   __ LoadAddress(rcx,
   2066                  ExternalReference::address_of_static_offsets_vector(isolate));
   2067 
   2068   // rbx: last_match_info backing store (FixedArray)
   2069   // rcx: offsets vector
   2070   // rdx: number of capture registers
   2071   Label next_capture, done;
   2072   // The capture register counter starts from the number of capture registers
   2073   // and counts down until it becomes negative.
   2074   __ bind(&next_capture);
   2075   __ subq(rdx, Immediate(1));
   2076   __ j(negative, &done, Label::kNear);
   2077   // Read the value from the static offsets vector buffer and make it a smi.
   2078   __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
   2079   __ Integer32ToSmi(rdi, rdi);
   2080   // Store the smi value in the last match info.
   2081   __ movq(FieldOperand(rbx,
   2082                        rdx,
   2083                        times_pointer_size,
   2084                        RegExpImpl::kFirstCaptureOffset),
   2085           rdi);
   2086   __ jmp(&next_capture);
   2087   __ bind(&done);
   2088 
   2089   // Return last match info.
   2090   __ movq(rax, r15);
   2091   __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
   2092 
   2093   __ bind(&exception);
   2094   // The result must now be an exception. If there is no pending exception, a
   2095   // stack overflow (on the backtrack stack) was detected in RegExp code, but
   2096   // the exception has not been created yet. Handle that in the runtime system.
   2097   // TODO(592): Rerunning the RegExp to get the stack overflow exception.
   2098   ExternalReference pending_exception_address(
   2099       Isolate::kPendingExceptionAddress, isolate);
   2100   Operand pending_exception_operand =
   2101       masm->ExternalOperand(pending_exception_address, rbx);
   2102   __ movq(rax, pending_exception_operand);
   2103   __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
   2104   __ cmpq(rax, rdx);
   2105   __ j(equal, &runtime);
   2106   __ movq(pending_exception_operand, rdx);
   2107 
   2108   __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
   2109   Label termination_exception;
   2110   __ j(equal, &termination_exception, Label::kNear);
   2111   __ Throw(rax);
   2112 
   2113   __ bind(&termination_exception);
   2114   __ ThrowUncatchable(rax);
   2115 
   2116   // Do the runtime call to execute the regexp.
   2117   __ bind(&runtime);
   2118   __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
   2119 
   2120   // Deferred code for string handling.
   2121   // (7) Not a long external string?  If yes, go to (10).
   2122   __ bind(&not_seq_nor_cons);
   2123   // Compare flags are still set from (3).
   2124   __ j(greater, &not_long_external, Label::kNear);  // Go to (10).
   2125 
   2126   // (8) External string.  Short external strings have been ruled out.
   2127   __ bind(&external_string);
   2128   __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
   2129   __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
   2130   if (FLAG_debug_code) {
   2131     // Assert that we do not have a cons or slice (indirect strings) here.
   2132     // Sequential strings have already been ruled out.
   2133     __ testb(rbx, Immediate(kIsIndirectStringMask));
   2134     __ Assert(zero, kExternalStringExpectedButNotFound);
   2135   }
   2136   __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
   2137   // Move the pointer so that offset-wise, it looks like a sequential string.
   2138   STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
   2139   __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
   2140   STATIC_ASSERT(kTwoByteStringTag == 0);
   2141   // (8a) Is the external string one byte?  If yes, go to (6).
   2142   __ testb(rbx, Immediate(kStringEncodingMask));
   2143   __ j(not_zero, &seq_one_byte_string);  // Goto (6).
   2144 
   2145   // rdi: subject string (flat two-byte)
   2146   // rax: RegExp data (FixedArray)
   2147   // (9) Two byte sequential.  Load regexp code for two byte.  Go to (E).
   2148   __ bind(&seq_two_byte_string);
   2149   __ movq(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
   2150   __ Set(rcx, 0);  // Type is two byte.
   2151   __ jmp(&check_code);  // Go to (E).
   2152 
   2153   // (10) Not a string or a short external string?  If yes, bail out to runtime.
   2154   __ bind(&not_long_external);
   2155   // Catch non-string subject or short external string.
   2156   STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
   2157   __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask));
   2158   __ j(not_zero, &runtime);
   2159 
   2160   // (11) Sliced string.  Replace subject with parent. Go to (5a).
   2161   // Load offset into r14 and replace subject string with parent.
   2162   __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
   2163   __ movq(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
   2164   __ jmp(&check_underlying);
   2165 #endif  // V8_INTERPRETED_REGEXP
   2166 }
   2167 
   2168 
   2169 void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
   2170   const int kMaxInlineLength = 100;
   2171   Label slowcase;
   2172   Label done;
   2173   StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
   2174   __ movq(r8, args.GetArgumentOperand(0));
   2175   __ JumpIfNotSmi(r8, &slowcase);
   2176   __ SmiToInteger32(rbx, r8);
   2177   __ cmpl(rbx, Immediate(kMaxInlineLength));
   2178   __ j(above, &slowcase);
   2179   // Smi-tagging is equivalent to multiplying by 2.
   2180   STATIC_ASSERT(kSmiTag == 0);
   2181   STATIC_ASSERT(kSmiTagSize == 1);
   2182   // Allocate RegExpResult followed by FixedArray with size in rbx.
   2183   // JSArray:   [Map][empty properties][Elements][Length-smi][index][input]
   2184   // Elements:  [Map][Length][..elements..]
   2185   __ Allocate(JSRegExpResult::kSize + FixedArray::kHeaderSize,
   2186               times_pointer_size,
   2187               rbx,  // In: Number of elements.
   2188               rax,  // Out: Start of allocation (tagged).
   2189               rcx,  // Out: End of allocation.
   2190               rdx,  // Scratch register
   2191               &slowcase,
   2192               TAG_OBJECT);
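          // Allocate computes the total size itself: the fixed part
          // (JSRegExpResult::kSize + FixedArray::kHeaderSize) plus
          // rbx * kPointerSize for the elements.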
   2193   // rax: Start of allocated area, object-tagged.
   2194   // rbx: Number of array elements as int32.
   2195   // r8: Number of array elements as smi.
   2196 
   2197   // Set JSArray map to global.regexp_result_map().
   2198   __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
   2199   __ movq(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
   2200   __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
   2201   __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx);
   2202 
   2203   // Set empty properties FixedArray.
   2204   __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
   2205   __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
   2206 
   2207   // Set elements to point to FixedArray allocated right after the JSArray.
   2208   __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
   2209   __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
   2210 
   2211   // Set input, index and length fields from arguments.
   2212   __ movq(r8, args.GetArgumentOperand(2));
   2213   __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8);
   2214   __ movq(r8, args.GetArgumentOperand(1));
   2215   __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8);
   2216   __ movq(r8, args.GetArgumentOperand(0));
   2217   __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);
   2218 
   2219   // Fill out the elements FixedArray.
   2220   // rax: JSArray.
   2221   // rcx: FixedArray.
   2222   // rbx: Number of elements in array as int32.
   2223 
   2224   // Set map.
   2225   __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
   2226   __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister);
   2227   // Set length.
   2228   __ Integer32ToSmi(rdx, rbx);
   2229   __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx);
   2230   // Fill contents of fixed-array with undefined.
   2231   __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
   2232   __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize));
   2233   // Fill fixed array elements with undefined.
   2234   // rax: JSArray.
   2235   // rbx: Number of elements in array that remains to be filled, as int32.
   2236   // rcx: Start of elements in FixedArray.
   2237   // rdx: undefined.
   2238   Label loop;
   2239   __ testl(rbx, rbx);
   2240   __ bind(&loop);
   2241   __ j(less_equal, &done);  // Jump if rbx is negative or zero.
   2242   __ subl(rbx, Immediate(1));
   2243   __ movq(Operand(rcx, rbx, times_pointer_size, 0), rdx);
   2244   __ jmp(&loop);
   2245 
   2246   __ bind(&done);
   2247   __ ret(3 * kPointerSize);
   2248 
   2249   __ bind(&slowcase);
   2250   __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
   2251 }
   2252 
   2253 
   2254 static int NegativeComparisonResult(Condition cc) {
   2255   ASSERT(cc != equal);
   2256   ASSERT((cc == less) || (cc == less_equal)
   2257       || (cc == greater) || (cc == greater_equal));
   2258   return (cc == greater || cc == greater_equal) ? LESS : GREATER;
   2259 }
   2260 
   2261 
   2262 static void CheckInputType(MacroAssembler* masm,
   2263                            Register input,
   2264                            CompareIC::State expected,
   2265                            Label* fail) {
   2266   Label ok;
   2267   if (expected == CompareIC::SMI) {
   2268     __ JumpIfNotSmi(input, fail);
   2269   } else if (expected == CompareIC::NUMBER) {
   2270     __ JumpIfSmi(input, &ok);
   2271     __ CompareMap(input, masm->isolate()->factory()->heap_number_map());
   2272     __ j(not_equal, fail);
   2273   }
   2274   // We could be strict about internalized/non-internalized here, but as long as
   2275   // hydrogen doesn't care, the stub doesn't have to care either.
   2276   __ bind(&ok);
   2277 }
   2278 
   2279 
   2280 static void BranchIfNotInternalizedString(MacroAssembler* masm,
   2281                                           Label* label,
   2282                                           Register object,
   2283                                           Register scratch) {
   2284   __ JumpIfSmi(object, label);
   2285   __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
   2286   __ movzxbq(scratch,
   2287              FieldOperand(scratch, Map::kInstanceTypeOffset));
   2288   STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
   2289   __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
   2290   __ j(not_zero, label);
   2291 }
   2292 
   2293 
   2294 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
   2295   Label check_unequal_objects, done;
   2296   Condition cc = GetCondition();
   2297   Factory* factory = masm->isolate()->factory();
   2298 
   2299   Label miss;
   2300   CheckInputType(masm, rdx, left_, &miss);
   2301   CheckInputType(masm, rax, right_, &miss);
   2302 
   2303   // Compare two smis.
   2304   Label non_smi, smi_done;
   2305   __ JumpIfNotBothSmi(rax, rdx, &non_smi);
   2306   __ subq(rdx, rax);
   2307   __ j(no_overflow, &smi_done);
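          // On signed overflow the difference has the wrong sign. A bitwise NOT
          // flips the sign bit back and, unlike neg, cannot overflow itself; the
          // off-by-one it introduces is harmless because callers only use the
          // sign and non-zeroness of the result.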
   2308   __ not_(rdx);  // Correct sign in case of overflow. rdx cannot be 0 here.
   2309   __ bind(&smi_done);
   2310   __ movq(rax, rdx);
   2311   __ ret(0);
   2312   __ bind(&non_smi);
   2313 
   2314   // The compare stub returns a positive, negative, or zero 64-bit integer
   2315   // value in rax, corresponding to the result of comparing the two inputs.
   2316   // NOTICE! This code is only reached after a smi-fast-case check, so
   2317   // it is certain that at least one operand isn't a smi.
   2318 
   2319   // Two identical objects are equal unless they are both NaN or undefined.
   2320   {
   2321     Label not_identical;
   2322     __ cmpq(rax, rdx);
   2323     __ j(not_equal, &not_identical, Label::kNear);
   2324 
   2325     if (cc != equal) {
   2326       // Check for undefined.  undefined OP undefined is false even though
   2327       // undefined == undefined.
   2328       Label check_for_nan;
   2329       __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
   2330       __ j(not_equal, &check_for_nan, Label::kNear);
   2331       __ Set(rax, NegativeComparisonResult(cc));
   2332       __ ret(0);
   2333       __ bind(&check_for_nan);
   2334     }
   2335 
   2336     // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
   2337     // so we do the second best thing - test it ourselves.
   2338     Label heap_number;
   2339     // If it's not a heap number, then return equal for (in)equality operator.
   2340     __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
   2341            factory->heap_number_map());
   2342     __ j(equal, &heap_number, Label::kNear);
   2343     if (cc != equal) {
   2344       // Call runtime on identical objects.  Otherwise return equal.
   2345       __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
   2346       __ j(above_equal, &not_identical, Label::kNear);
   2347     }
   2348     __ Set(rax, EQUAL);
   2349     __ ret(0);
   2350 
   2351     __ bind(&heap_number);
   2352     // It is a heap number, so return equal if it's not NaN.
   2353     // For NaN, return 1 for every condition except greater and
   2354     // greater-equal.  Return -1 for them, so the comparison yields
   2355     // false for all conditions except not-equal.
   2356     __ Set(rax, EQUAL);
   2357     __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
   2358     __ ucomisd(xmm0, xmm0);
   2359     __ setcc(parity_even, rax);
   2360     // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
   2361     if (cc == greater_equal || cc == greater) {
   2362       __ neg(rax);
   2363     }
   2364     __ ret(0);
   2365 
   2366     __ bind(&not_identical);
   2367   }
   2368 
   2369   if (cc == equal) {  // Both strict and non-strict.
   2370     Label slow;  // Fallthrough label.
   2371 
   2372     // If we're doing a strict equality comparison, we don't have to do
   2373     // type conversion, so we generate code to do fast comparison for objects
   2374     // and oddballs. Non-smi numbers and strings still go through the usual
   2375     // slow-case code.
   2376     if (strict()) {
   2377       // If either is a Smi (we know that not both are), then they can only
   2378       // be equal if the other is a HeapNumber. If so, use the slow case.
   2379       {
   2380         Label not_smis;
   2381         __ SelectNonSmi(rbx, rax, rdx, &not_smis);
   2382 
   2383         // Check if the non-smi operand is a heap number.
   2384         __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
   2385                factory->heap_number_map());
   2386         // If heap number, handle it in the slow case.
   2387         __ j(equal, &slow);
   2388         // Return non-equal.  ebx (the lower half of rbx) is not zero.
   2389         __ movq(rax, rbx);
   2390         __ ret(0);
   2391 
   2392         __ bind(&not_smis);
   2393       }
   2394 
   2395       // If either operand is a JSObject or an oddball value, then they are not
   2396       // equal since their pointers are different
   2397       // There is no test for undetectability in strict equality.
   2398 
   2399       // If the first object is a JS object, we have done pointer comparison.
   2400       STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
   2401       Label first_non_object;
   2402       __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
   2403       __ j(below, &first_non_object, Label::kNear);
   2404       // Return non-zero (eax (not rax) is not zero).
   2405       Label return_not_equal;
   2406       STATIC_ASSERT(kHeapObjectTag != 0);
   2407       __ bind(&return_not_equal);
   2408       __ ret(0);
   2409 
   2410       __ bind(&first_non_object);
   2411       // Check for oddballs: true, false, null, undefined.
   2412       __ CmpInstanceType(rcx, ODDBALL_TYPE);
   2413       __ j(equal, &return_not_equal);
   2414 
   2415       __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx);
   2416       __ j(above_equal, &return_not_equal);
   2417 
   2418       // Check for oddballs: true, false, null, undefined.
   2419       __ CmpInstanceType(rcx, ODDBALL_TYPE);
   2420       __ j(equal, &return_not_equal);
   2421 
   2422       // Fall through to the general case.
   2423     }
   2424     __ bind(&slow);
   2425   }
   2426 
   2427   // Generate the number comparison code.
   2428   Label non_number_comparison;
   2429   Label unordered;
   2430   FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
   2431   __ xorl(rax, rax);
   2432   __ xorl(rcx, rcx);
   2433   __ ucomisd(xmm0, xmm1);
   2434 
   2435   // Don't base result on EFLAGS when a NaN is involved.
   2436   __ j(parity_even, &unordered, Label::kNear);
   2437   // Return a result of -1, 0, or 1, based on EFLAGS.
   2438   __ setcc(above, rax);
   2439   __ setcc(below, rcx);
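          // rax = 1 iff xmm0 > xmm1 and rcx = 1 iff xmm0 < xmm1, so the
          // subtraction below yields exactly -1, 0, or 1.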
   2440   __ subq(rax, rcx);
   2441   __ ret(0);
   2442 
   2443   // If one of the numbers was NaN, then the result is always false.
   2444   // The cc is never not-equal.
   2445   __ bind(&unordered);
   2446   ASSERT(cc != not_equal);
   2447   if (cc == less || cc == less_equal) {
   2448     __ Set(rax, 1);
   2449   } else {
   2450     __ Set(rax, -1);
   2451   }
   2452   __ ret(0);
   2453 
   2454   // The number comparison code did not provide a valid result.
   2455   __ bind(&non_number_comparison);
   2456 
   2457   // Fast negative check for internalized-to-internalized equality.
   2458   Label check_for_strings;
   2459   if (cc == equal) {
   2460     BranchIfNotInternalizedString(
   2461         masm, &check_for_strings, rax, kScratchRegister);
   2462     BranchIfNotInternalizedString(
   2463         masm, &check_for_strings, rdx, kScratchRegister);
   2464 
   2465     // We've already checked for object identity, so if both operands are
   2466     // internalized strings they aren't equal. Register eax (not rax) already
   2467     // holds a non-zero value, which indicates not equal, so just return.
   2468     __ ret(0);
   2469   }
   2470 
   2471   __ bind(&check_for_strings);
   2472 
   2473   __ JumpIfNotBothSequentialAsciiStrings(
   2474       rdx, rax, rcx, rbx, &check_unequal_objects);
   2475 
   2476   // Inline comparison of ASCII strings.
   2477   if (cc == equal) {
   2478     StringCompareStub::GenerateFlatAsciiStringEquals(masm,
   2479                                                      rdx,
   2480                                                      rax,
   2481                                                      rcx,
   2482                                                      rbx);
   2483   } else {
   2484     StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
   2485                                                        rdx,
   2486                                                        rax,
   2487                                                        rcx,
   2488                                                        rbx,
   2489                                                        rdi,
   2490                                                        r8);
   2491   }
   2492 
   2493 #ifdef DEBUG
   2494   __ Abort(kUnexpectedFallThroughFromStringComparison);
   2495 #endif
   2496 
   2497   __ bind(&check_unequal_objects);
   2498   if (cc == equal && !strict()) {
   2499     // Not strict equality.  Objects are unequal if
   2500     // they are both JSObjects and not undetectable,
   2501     // and their pointers are different.
   2502     Label not_both_objects, return_unequal;
   2503     // At most one is a smi, so we can test for smi by adding the two.
   2504     // A smi plus a heap object has the low bit set, a heap object plus
   2505     // a heap object has the low bit clear.
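            // Worked example of the tag arithmetic (kSmiTag == 0):
            //   smi  (...xxx0) + heap object (...xxx1) -> low bit 1
            //   heap (...xxx1) + heap object (...xxx1) -> low bit 0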
   2506     STATIC_ASSERT(kSmiTag == 0);
   2507     STATIC_ASSERT(kSmiTagMask == 1);
   2508     __ lea(rcx, Operand(rax, rdx, times_1, 0));
   2509     __ testb(rcx, Immediate(kSmiTagMask));
   2510     __ j(not_zero, &not_both_objects, Label::kNear);
   2511     __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
   2512     __ j(below, &not_both_objects, Label::kNear);
   2513     __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx);
   2514     __ j(below, &not_both_objects, Label::kNear);
   2515     __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
   2516              Immediate(1 << Map::kIsUndetectable));
   2517     __ j(zero, &return_unequal, Label::kNear);
   2518     __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
   2519              Immediate(1 << Map::kIsUndetectable));
   2520     __ j(zero, &return_unequal, Label::kNear);
   2521     // The objects are both undetectable, so they both compare as the value
   2522     // undefined, and are equal.
   2523     __ Set(rax, EQUAL);
   2524     __ bind(&return_unequal);
   2525     // Return non-equal by returning the non-zero object pointer in rax,
   2526     // or return equal if we fell through to here.
   2527     __ ret(0);
   2528     __ bind(&not_both_objects);
   2529   }
   2530 
   2531   // Push arguments below the return address to prepare jump to builtin.
   2532   __ PopReturnAddressTo(rcx);
   2533   __ push(rdx);
   2534   __ push(rax);
   2535 
   2536   // Figure out which native to call and set up the arguments.
   2537   Builtins::JavaScript builtin;
   2538   if (cc == equal) {
   2539     builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
   2540   } else {
   2541     builtin = Builtins::COMPARE;
   2542     __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
   2543   }
   2544 
   2545   __ PushReturnAddressFrom(rcx);
   2546 
   2547   // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
   2548   // tagged as a small integer.
   2549   __ InvokeBuiltin(builtin, JUMP_FUNCTION);
   2550 
   2551   __ bind(&miss);
   2552   GenerateMiss(masm);
   2553 }
   2554 
   2555 
   2556 static void GenerateRecordCallTarget(MacroAssembler* masm) {
   2557   // Cache the called function in a global property cell.  Cache states
   2558   // are uninitialized, monomorphic (indicated by a JSFunction), and
   2559   // megamorphic.
   2560   // rax : number of arguments to the construct function
   2561   // rbx : cache cell for call target
   2562   // rdi : the function to call
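          // Sketch of the cache state machine implemented below (illustrative
          // pseudocode, not emitted code):
          //   if (cell == function || cell == megamorphic_sentinel) -> done
          //   if (cell is an AllocationSite && function is Array()) -> done
          //   if (cell == uninitialized_sentinel) -> cache the function, or an
          //       AllocationSite when the function is the Array constructor
          //   otherwise                           -> go megamorphic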
   2563   Isolate* isolate = masm->isolate();
   2564   Label initialize, done, miss, megamorphic, not_array_function;
   2565 
   2566   // Load the cache state into rcx.
   2567   __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset));
   2568 
   2569   // A monomorphic cache hit or an already megamorphic state: invoke the
   2570   // function without changing the state.
   2571   __ cmpq(rcx, rdi);
   2572   __ j(equal, &done);
   2573   __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate));
   2574   __ j(equal, &done);
   2575 
   2576   // If we came here, we need to see if the callee is the Array function.
   2577   // If we didn't have a matching function, and we didn't find the
   2578   // megamorphic sentinel, then the cell holds either some other function
   2579   // or an AllocationSite. Do a map check on the object in rcx.
   2580   Handle<Map> allocation_site_map =
   2581       masm->isolate()->factory()->allocation_site_map();
   2582   __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
   2583   __ j(not_equal, &miss);
   2584 
   2585   // Make sure the function is the Array() function.
   2586   __ LoadArrayFunction(rcx);
   2587   __ cmpq(rdi, rcx);
   2588   __ j(not_equal, &megamorphic);
   2589   __ jmp(&done);
   2590 
   2591   __ bind(&miss);
   2592 
   2593   // A monomorphic miss (i.e., here the cache is not uninitialized) goes
   2594   // megamorphic.
   2595   __ Cmp(rcx, TypeFeedbackCells::UninitializedSentinel(isolate));
   2596   __ j(equal, &initialize);
   2597   // MegamorphicSentinel is an immortal immovable object (undefined) so no
   2598   // write-barrier is needed.
   2599   __ bind(&megamorphic);
   2600   __ Move(FieldOperand(rbx, Cell::kValueOffset),
   2601           TypeFeedbackCells::MegamorphicSentinel(isolate));
   2602   __ jmp(&done);
   2603 
   2604   // An uninitialized cache is patched with the function, or with an
   2605   // AllocationSite (tracking the ElementsKind) if it is the Array constructor.
   2606   __ bind(&initialize);
   2607   // Make sure the function is the Array() function.
   2608   __ LoadArrayFunction(rcx);
   2609   __ cmpq(rdi, rcx);
   2610   __ j(not_equal, &not_array_function);
   2611 
   2612   // The target function is the Array constructor. Create an AllocationSite
   2613   // if we don't already have one, and store it in the cell.
   2614   {
   2615     FrameScope scope(masm, StackFrame::INTERNAL);
   2616 
   2617     // Arguments register must be smi-tagged to call out.
   2618     __ Integer32ToSmi(rax, rax);
   2619     __ push(rax);
   2620     __ push(rdi);
   2621     __ push(rbx);
   2622 
   2623     CreateAllocationSiteStub create_stub;
   2624     __ CallStub(&create_stub);
   2625 
   2626     __ pop(rbx);
   2627     __ pop(rdi);
   2628     __ pop(rax);
   2629     __ SmiToInteger32(rax, rax);
   2630   }
   2631   __ jmp(&done);
   2632 
   2633   __ bind(&not_array_function);
   2634   __ movq(FieldOperand(rbx, Cell::kValueOffset), rdi);
   2635   // No need for a write barrier here - cells are rescanned.
   2636 
   2637   __ bind(&done);
   2638 }
   2639 
   2640 
   2641 void CallFunctionStub::Generate(MacroAssembler* masm) {
   2642   // rbx : cache cell for call target
   2643   // rdi : the function to call
   2644   Isolate* isolate = masm->isolate();
   2645   Label slow, non_function;
   2646   StackArgumentsAccessor args(rsp, argc_);
   2647 
   2648   // The receiver might implicitly be the global object. This is
   2649   // indicated by passing the hole as the receiver to the call
   2650   // function stub.
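          // Roughly: an unqualified call f() reaches this stub with the hole
          // as its receiver, while o.f() passes o.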
   2651   if (ReceiverMightBeImplicit()) {
   2652     Label call;
   2653     // Get the receiver from the stack.
   2654     __ movq(rax, args.GetReceiverOperand());
   2655     // A call-as-function is indicated by the hole as the receiver.
   2656     __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
   2657     __ j(not_equal, &call, Label::kNear);
   2658     // Patch the receiver on the stack with the global receiver object.
   2659     __ movq(rcx, GlobalObjectOperand());
   2660     __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
   2661     __ movq(args.GetReceiverOperand(), rcx);
   2662     __ bind(&call);
   2663   }
   2664 
   2665   // Check that the function really is a JavaScript function.
   2666   __ JumpIfSmi(rdi, &non_function);
   2667   // Go to the slow case if we do not have a function.
   2668   __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
   2669   __ j(not_equal, &slow);
   2670 
   2671   if (RecordCallTarget()) {
   2672     GenerateRecordCallTarget(masm);
   2673   }
   2674 
   2675   // Fast-case: Just invoke the function.
   2676   ParameterCount actual(argc_);
   2677 
   2678   if (ReceiverMightBeImplicit()) {
   2679     Label call_as_function;
   2680     __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
   2681     __ j(equal, &call_as_function);
   2682     __ InvokeFunction(rdi,
   2683                       actual,
   2684                       JUMP_FUNCTION,
   2685                       NullCallWrapper(),
   2686                       CALL_AS_METHOD);
   2687     __ bind(&call_as_function);
   2688   }
   2689   __ InvokeFunction(rdi,
   2690                     actual,
   2691                     JUMP_FUNCTION,
   2692                     NullCallWrapper(),
   2693                     CALL_AS_FUNCTION);
   2694 
   2695   // Slow-case: Non-function called.
   2696   __ bind(&slow);
   2697   if (RecordCallTarget()) {
   2698     // If there is a call target cache, mark it megamorphic in the
   2699     // non-function case.  MegamorphicSentinel is an immortal immovable
   2700     // object (undefined) so no write barrier is needed.
   2701     __ Move(FieldOperand(rbx, Cell::kValueOffset),
   2702             TypeFeedbackCells::MegamorphicSentinel(isolate));
   2703   }
   2704   // Check for function proxy.
   2705   __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
   2706   __ j(not_equal, &non_function);
   2707   __ PopReturnAddressTo(rcx);
   2708   __ push(rdi);  // Put proxy as an extra argument under the return address.
   2709   __ PushReturnAddressFrom(rcx);
   2710   __ Set(rax, argc_ + 1);
   2711   __ Set(rbx, 0);
   2712   __ SetCallKind(rcx, CALL_AS_METHOD);
   2713   __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
   2714   {
   2715     Handle<Code> adaptor =
   2716       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
   2717     __ jmp(adaptor, RelocInfo::CODE_TARGET);
   2718   }
   2719 
   2720   // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
   2721   // of the original receiver from the call site).
   2722   __ bind(&non_function);
   2723   __ movq(args.GetReceiverOperand(), rdi);
   2724   __ Set(rax, argc_);
   2725   __ Set(rbx, 0);
   2726   __ SetCallKind(rcx, CALL_AS_METHOD);
   2727   __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
   2728   Handle<Code> adaptor =
   2729       isolate->builtins()->ArgumentsAdaptorTrampoline();
   2730   __ Jump(adaptor, RelocInfo::CODE_TARGET);
   2731 }
   2732 
   2733 
   2734 void CallConstructStub::Generate(MacroAssembler* masm) {
   2735   // rax : number of arguments
   2736   // rbx : cache cell for call target
   2737   // rdi : constructor function
   2738   Label slow, non_function_call;
   2739 
   2740   // Check that function is not a smi.
   2741   __ JumpIfSmi(rdi, &non_function_call);
   2742   // Check that function is a JSFunction.
   2743   __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
   2744   __ j(not_equal, &slow);
   2745 
   2746   if (RecordCallTarget()) {
   2747     GenerateRecordCallTarget(masm);
   2748   }
   2749 
   2750   // Jump to the function-specific construct stub.
   2751   Register jmp_reg = rcx;
   2752   __ movq(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
   2753   __ movq(jmp_reg, FieldOperand(jmp_reg,
   2754                                 SharedFunctionInfo::kConstructStubOffset));
   2755   __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
   2756   __ jmp(jmp_reg);
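          // In effect (sketch): tail-call function->shared()->construct_stub(),
          // entering just past the Code object header.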
   2757 
   2758   // rdi: called object
   2759   // rax: number of arguments
   2760   // rcx: object map
   2761   Label do_call;
   2762   __ bind(&slow);
   2763   __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
   2764   __ j(not_equal, &non_function_call);
   2765   __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
   2766   __ jmp(&do_call);
   2767 
   2768   __ bind(&non_function_call);
   2769   __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
   2770   __ bind(&do_call);
   2771   // Set expected number of arguments to zero (not changing rax).
   2772   __ Set(rbx, 0);
   2773   __ SetCallKind(rcx, CALL_AS_METHOD);
   2774   __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
   2775           RelocInfo::CODE_TARGET);
   2776 }
   2777 
   2778 
   2779 bool CEntryStub::NeedsImmovableCode() {
   2780   return false;
   2781 }
   2782 
   2783 
   2784 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   2785   CEntryStub::GenerateAheadOfTime(isolate);
   2786   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   2787   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   2788   // It is important that the store buffer overflow stubs are generated first.
   2789   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   2790   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
   2791   BinaryOpICStub::GenerateAheadOfTime(isolate);
   2792 }
   2793 
   2794 
   2795 void CodeStub::GenerateFPStubs(Isolate* isolate) {
   2796 }
   2797 
   2798 
   2799 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
   2800   CEntryStub stub(1, kDontSaveFPRegs);
   2801   stub.GetCode(isolate);
   2802   CEntryStub save_doubles(1, kSaveFPRegs);
   2803   save_doubles.GetCode(isolate);
   2804 }
   2805 
   2806 
   2807 static void JumpIfOOM(MacroAssembler* masm,
   2808                       Register value,
   2809                       Register scratch,
   2810                       Label* oom_label) {
   2811   __ movq(scratch, value);
   2812   STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3);
   2813   STATIC_ASSERT(kFailureTag == 3);
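          // With the 2-bit failure tag (3) in the low bits and the failure type
          // OUT_OF_MEMORY_EXCEPTION (also 3) in the next two bits, the low
          // nibble of an OOM failure is 0b1111 == 0xf, which the and/cmp pair
          // below checks for.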
   2814   __ and_(scratch, Immediate(0xf));
   2815   __ cmpq(scratch, Immediate(0xf));
   2816   __ j(equal, oom_label);
   2817 }
   2818 
   2819 
   2820 void CEntryStub::GenerateCore(MacroAssembler* masm,
   2821                               Label* throw_normal_exception,
   2822                               Label* throw_termination_exception,
   2823                               Label* throw_out_of_memory_exception,
   2824                               bool do_gc,
   2825                               bool always_allocate_scope) {
   2826   // rax: result parameter for PerformGC, if any.
   2827   // rbx: pointer to C function  (C callee-saved).
   2828   // rbp: frame pointer  (restored after C call).
   2829   // rsp: stack pointer  (restored after C call).
   2830   // r14: number of arguments including receiver (C callee-saved).
   2831   // r15: pointer to the first argument (C callee-saved).
   2832   //      This pointer is reused in LeaveExitFrame(), so it is stored in a
   2833   //      callee-saved register.
   2834 
   2835   // Simple results returned in rax (both AMD64 and Win64 calling conventions).
   2836   // Complex results must be written to the address passed as the first argument.
   2837   // AMD64 calling convention: a struct of two pointers is returned in rax:rdx.
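          // E.g. for result_size_ == 2 (a pair of pointers): AMD64 returns the
          // pair in rax:rdx, while Win64 returns it through a hidden pointer
          // argument, as the _WIN64 paths below illustrate.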
   2838 
   2839   // Check stack alignment.
   2840   if (FLAG_debug_code) {
   2841     __ CheckStackAlignment();
   2842   }
   2843 
   2844   if (do_gc) {
   2845     // Pass the failure code returned from the last attempt as the first
   2846     // argument to PerformGC. No need to use PrepareCallCFunction/CallCFunction
   2847     // here as the stack is known to be aligned. This function takes its
   2848     // arguments in registers.
   2849     __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
   2850     __ movq(arg_reg_1, rax);
   2851     __ Move(kScratchRegister,
   2852             ExternalReference::perform_gc_function(masm->isolate()));
   2853     __ call(kScratchRegister);
   2854   }
   2855 
   2856   ExternalReference scope_depth =
   2857       ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
   2858   if (always_allocate_scope) {
   2859     Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
   2860     __ incl(scope_depth_operand);
   2861   }
   2862 
   2863   // Call C function.
   2864 #ifdef _WIN64
   2865   // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9.
   2866   // Pass argc and argv as two parameters. The arguments object will
   2867   // be created by stubs declared by DECLARE_RUNTIME_FUNCTION().
   2868   if (result_size_ < 2) {
   2869     // Pass a pointer to the Arguments object as the first argument.
   2870     // Return result in single register (rax).
   2871     __ movq(rcx, r14);  // argc.
   2872     __ movq(rdx, r15);  // argv.
   2873     __ Move(r8, ExternalReference::isolate_address(masm->isolate()));
   2874   } else {
   2875     ASSERT_EQ(2, result_size_);
   2876     // Pass a pointer to the result location as the first argument.
   2877     __ lea(rcx, StackSpaceOperand(2));
   2878     // Pass a pointer to the Arguments object as the second argument.
   2879     __ movq(rdx, r14);  // argc.
   2880     __ movq(r8, r15);   // argv.
   2881     __ Move(r9, ExternalReference::isolate_address(masm->isolate()));
   2882   }
   2883 
   2884 #else  // _WIN64
   2885   // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
   2886   __ movq(rdi, r14);  // argc.
   2887   __ movq(rsi, r15);  // argv.
   2888   __ Move(rdx, ExternalReference::isolate_address(masm->isolate()));
   2889 #endif
   2890   __ call(rbx);
   2891   // Result is in rax - do not destroy this register!
   2892 
   2893   if (always_allocate_scope) {
   2894     Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
   2895     __ decl(scope_depth_operand);
   2896   }
   2897 
   2898   // Check for failure result.
   2899   Label failure_returned;
   2900   STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
   2901 #ifdef _WIN64
   2902   // If the return value is on the stack, read it into registers.
   2903   if (result_size_ > 1) {
   2904     ASSERT_EQ(2, result_size_);
   2905     // Read result values stored on stack. Result is stored
   2906     // above the four argument mirror slots and the two
   2907     // Arguments object slots.
   2908     __ movq(rax, Operand(rsp, 6 * kPointerSize));
   2909     __ movq(rdx, Operand(rsp, 7 * kPointerSize));
   2910   }
   2911 #endif
   2912   __ lea(rcx, Operand(rax, 1));
   2913   // Lower 2 bits of rcx are 0 iff rax has failure tag.
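          // (kFailureTag == 3, so adding 1 carries out of the two tag bits:
          // ...11 + 1 -> ...00; any other tag leaves a non-zero low bit pair.)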
   2914   __ testl(rcx, Immediate(kFailureTagMask));
   2915   __ j(zero, &failure_returned);
   2916 
   2917   // Exit the JavaScript to C++ exit frame.
   2918   __ LeaveExitFrame(save_doubles_);
   2919   __ ret(0);
   2920 
   2921   // Handling of failure.
   2922   __ bind(&failure_returned);
   2923 
   2924   Label retry;
   2925   // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
   2926   STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
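          // The mask below selects the failure type field; RETRY_AFTER_GC == 0,
          // so a zero result means "allocation failed, retry after GC".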
   2927   __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
   2928   __ j(zero, &retry, Label::kNear);
   2929 
   2930   // Special handling of out of memory exceptions.
   2931   JumpIfOOM(masm, rax, kScratchRegister, throw_out_of_memory_exception);
   2932 
   2933   // Retrieve the pending exception.
   2934   ExternalReference pending_exception_address(
   2935       Isolate::kPendingExceptionAddress, masm->isolate());
   2936   Operand pending_exception_operand =
   2937       masm->ExternalOperand(pending_exception_address);
   2938   __ movq(rax, pending_exception_operand);
   2939 
   2940   // See if we just retrieved an OOM exception.
   2941   JumpIfOOM(masm, rax, kScratchRegister, throw_out_of_memory_exception);
   2942 
   2943   // Clear the pending exception.
   2944   pending_exception_operand =
   2945       masm->ExternalOperand(pending_exception_address);
   2946   __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
   2947   __ movq(pending_exception_operand, rdx);
   2948 
   2949   // Special handling of termination exceptions, which are uncatchable
   2950   // by JavaScript code.
   2951   __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
   2952   __ j(equal, throw_termination_exception);
   2953 
   2954   // Handle normal exception.
   2955   __ jmp(throw_normal_exception);
   2956 
   2957   // Retry.
   2958   __ bind(&retry);
   2959 }
   2960 
   2961 
   2962 void CEntryStub::Generate(MacroAssembler* masm) {
   2963   // rax: number of arguments including receiver
   2964   // rbx: pointer to C function  (C callee-saved)
   2965   // rbp: frame pointer of calling JS frame (restored after C call)
   2966   // rsp: stack pointer  (restored after C call)
   2967   // rsi: current context (restored)
   2968 
   2969   // NOTE: Invocations of builtins may return failure objects
   2970   // instead of a proper result. The builtin entry handles
   2971   // this by performing a garbage collection and retrying the
   2972   // builtin as needed.
   2973 
   2974   ProfileEntryHookStub::MaybeCallEntryHook(masm);
   2975 
   2976   // Enter the exit frame that transitions from JavaScript to C++.
   2977 #ifdef _WIN64
   2978   int arg_stack_space = (result_size_ < 2 ? 2 : 4);
   2979 #else
   2980   int arg_stack_space = 0;
   2981 #endif
   2982   __ EnterExitFrame(arg_stack_space, save_doubles_);
   2983 
   2984   // rax: Holds the context at this point, but should not be used.
   2985   //      On entry to code generated by GenerateCore, it must hold
   2986   //      a failure result if the do_gc argument to GenerateCore
   2987   //      is true.  This failure result can be the result of code
   2988   //      generated by a previous call to GenerateCore.  The value
   2989   //      of rax is then passed to Runtime::PerformGC.
   2990   // rbx: pointer to builtin function  (C callee-saved).
   2991   // rbp: frame pointer of exit frame  (restored after C call).
   2992   // rsp: stack pointer (restored after C call).
   2993   // r14: number of arguments including receiver (C callee-saved).
   2994   // r15: argv pointer (C callee-saved).
   2995 
   2996   Label throw_normal_exception;
   2997   Label throw_termination_exception;
   2998   Label throw_out_of_memory_exception;
   2999 
   3000   // Call into the runtime system.
   3001   GenerateCore(masm,
   3002                &throw_normal_exception,
   3003                &throw_termination_exception,
   3004                &throw_out_of_memory_exception,
   3005                false,
   3006                false);
   3007 
   3008   // Do space-specific GC and retry runtime call.
   3009   GenerateCore(masm,
   3010                &throw_normal_exception,
   3011                &throw_termination_exception,
   3012                &throw_out_of_memory_exception,
   3013                true,
   3014                false);
   3015 
   3016   // Do full GC and retry runtime call one final time.
   3017   Failure* failure = Failure::InternalError();
   3018   __ movq(rax, failure, RelocInfo::NONE64);
   3019   GenerateCore(masm,
   3020                &throw_normal_exception,
   3021                &throw_termination_exception,
   3022                &throw_out_of_memory_exception,
   3023                true,
   3024                true);
   3025 
   3026   __ bind(&throw_out_of_memory_exception);
   3027   // Set external caught exception to false.
   3028   Isolate* isolate = masm->isolate();
   3029   ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
   3030                                     isolate);
   3031   __ Set(rax, static_cast<int64_t>(false));
   3032   __ Store(external_caught, rax);
   3033 
   3034   // Set pending exception and rax to out of memory exception.
   3035   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
   3036                                       isolate);
   3037   Label already_have_failure;
   3038   JumpIfOOM(masm, rax, kScratchRegister, &already_have_failure);
   3039   __ movq(rax, Failure::OutOfMemoryException(0x1), RelocInfo::NONE64);
   3040   __ bind(&already_have_failure);
   3041   __ Store(pending_exception, rax);
   3042   // Fall through to the next label.
   3043 
   3044   __ bind(&throw_termination_exception);
   3045   __ ThrowUncatchable(rax);
   3046 
   3047   __ bind(&throw_normal_exception);
   3048   __ Throw(rax);
   3049 }
   3050 
   3051 
   3052 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   3053   Label invoke, handler_entry, exit;
   3054   Label not_outermost_js, not_outermost_js_2;
   3055 
   3056   ProfileEntryHookStub::MaybeCallEntryHook(masm);
   3057 
   3058   {  // NOLINT. Scope block confuses linter.
   3059     MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
   3060     // Set up frame.
   3061     __ push(rbp);
   3062     __ movq(rbp, rsp);
   3063 
   3064     // Push the stack frame type marker twice.
   3065     int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
   3066     // Scratch register is neither callee-saved, nor an argument register on
   3067     // any platform. It's free to use at this point.
   3068     // Cannot use smi-register for loading yet.
   3069     __ movq(kScratchRegister, Smi::FromInt(marker), RelocInfo::NONE64);
   3070     __ push(kScratchRegister);  // context slot
   3071     __ push(kScratchRegister);  // function slot
   3072     // Save callee-saved registers (X64/Win64 calling conventions).
   3073     __ push(r12);
   3074     __ push(r13);
   3075     __ push(r14);
   3076     __ push(r15);
   3077 #ifdef _WIN64
   3078     __ push(rdi);  // Only callee-saved in Win64 ABI, argument in AMD64 ABI.
   3079     __ push(rsi);  // Only callee-saved in Win64 ABI, argument in AMD64 ABI.
   3080 #endif
   3081     __ push(rbx);
   3082 
   3083 #ifdef _WIN64
   3084     // On Win64 XMM6-XMM15 are callee-saved.
   3085     __ subq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
   3086     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
   3087     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
   3088     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
   3089     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
   3090     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
   3091     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
   3092     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
   3093     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
   3094     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
   3095     __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
   3096 #endif
   3097 
   3098     // Set up the roots and smi constant registers.
   3099     // Needs to be done before any further smi loads.
   3100     __ InitializeSmiConstantRegister();
   3101     __ InitializeRootRegister();
   3102   }
   3103 
   3104   Isolate* isolate = masm->isolate();
   3105 
   3106   // Save copies of the top frame descriptor on the stack.
   3107   ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate);
   3108   {
   3109     Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
   3110     __ push(c_entry_fp_operand);
   3111   }
   3112 
   3113   // If this is the outermost JS call, set js_entry_sp value.
   3114   ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
   3115   __ Load(rax, js_entry_sp);
   3116   __ testq(rax, rax);
   3117   __ j(not_zero, &not_outermost_js);
   3118   __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
   3119   __ movq(rax, rbp);
   3120   __ Store(js_entry_sp, rax);
   3121   Label cont;
   3122   __ jmp(&cont);
   3123   __ bind(&not_outermost_js);
   3124   __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
   3125   __ bind(&cont);
   3126 
   3127   // Jump to a faked try block that does the invoke, with a faked catch
   3128   // block that sets the pending exception.
   3129   __ jmp(&invoke);
   3130   __ bind(&handler_entry);
   3131   handler_offset_ = handler_entry.pos();
   3132   // Caught exception: Store result (exception) in the pending exception
   3133   // field in the JSEnv and return a failure sentinel.
   3134   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
   3135                                       isolate);
   3136   __ Store(pending_exception, rax);
   3137   __ movq(rax, Failure::Exception(), RelocInfo::NONE64);
   3138   __ jmp(&exit);
   3139 
   3140   // Invoke: Link this frame into the handler chain.  There's only one
   3141   // handler block in this code object, so its index is 0.
   3142   __ bind(&invoke);
   3143   __ PushTryHandler(StackHandler::JS_ENTRY, 0);
   3144 
   3145   // Clear any pending exceptions.
   3146   __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
   3147   __ Store(pending_exception, rax);
   3148 
   3149   // Fake a receiver (NULL).
   3150   __ push(Immediate(0));  // receiver
   3151 
   3152   // Invoke the function by calling through JS entry trampoline builtin and
   3153   // pop the faked function when we return. We load the address from an
   3154   // external reference instead of inlining the call target address directly
   3155   // in the code, because the builtin stubs may not have been generated yet
   3156   // at the time this code is generated.
   3157   if (is_construct) {
   3158     ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
   3159                                       isolate);
   3160     __ Load(rax, construct_entry);
   3161   } else {
   3162     ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
   3163     __ Load(rax, entry);
   3164   }
   3165   __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
   3166   __ call(kScratchRegister);
   3167 
   3168   // Unlink this frame from the handler chain.
   3169   __ PopTryHandler();
   3170 
   3171   __ bind(&exit);
   3172   // Check if the current stack frame is marked as the outermost JS frame.
   3173   __ pop(rbx);
   3174   __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
   3175   __ j(not_equal, &not_outermost_js_2);
   3176   __ Move(kScratchRegister, js_entry_sp);
   3177   __ movq(Operand(kScratchRegister, 0), Immediate(0));
   3178   __ bind(&not_outermost_js_2);
   3179 
   3180   // Restore the top frame descriptor from the stack.
   3181   { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
   3182     __ pop(c_entry_fp_operand);
   3183   }
   3184 
   3185   // Restore callee-saved registers (X64 conventions).
   3186 #ifdef _WIN64
   3187   // On Win64 XMM6-XMM15 are callee-saved.
   3188   __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
   3189   __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
   3190   __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
   3191   __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
   3192   __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
   3193   __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
   3194   __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
   3195   __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
   3196   __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
   3197   __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
   3198   __ addq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
   3199 #endif
   3200 
   3201   __ pop(rbx);
   3202 #ifdef _WIN64
   3203   // Callee-saved in Win64 ABI; argument/volatile registers in AMD64 ABI.
   3204   __ pop(rsi);
   3205   __ pop(rdi);
   3206 #endif
   3207   __ pop(r15);
   3208   __ pop(r14);
   3209   __ pop(r13);
   3210   __ pop(r12);
   3211   __ addq(rsp, Immediate(2 * kPointerSize));  // Remove frame type markers.
   3212 
   3213   // Restore frame pointer and return.
   3214   __ pop(rbp);
   3215   __ ret(0);
   3216 }
   3217 
   3218 
   3219 void InstanceofStub::Generate(MacroAssembler* masm) {
   3220   // Implements "value instanceof function" operator.
   3221   // Expected input state with no inline cache:
   3222   //   rsp[0]  : return address
   3223   //   rsp[8]  : function pointer
   3224   //   rsp[16] : value
   3225   // Expected input state with an inline one-element cache:
   3226   //   rsp[0]  : return address
   3227   //   rsp[8]  : offset from return address to location of inline cache
   3228   //   rsp[16] : function pointer
   3229   //   rsp[24] : value
   3230   // Returns a bitwise zero to indicate that the value
   3231   // is an instance of the function, and anything else to
   3232   // indicate that it is not an instance.
   3233 
   3234   static const int kOffsetToMapCheckValue = 2;
   3235   static const int kOffsetToResultValue = 18;
   3236   // The last 4 bytes of the instruction sequence
   3237   //   movq(rdi, FieldOperand(rax, HeapObject::kMapOffset))
   3238   //   Move(kScratchRegister, Factory::the_hole_value())
   3239   // in front of the hole value address.
   3240   static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;
   3241   // The last 4 bytes of the instruction sequence
   3242   //   __ j(not_equal, &cache_miss);
   3243   //   __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
   3244   // before the offset of the hole value in the root array.
   3245   static const unsigned int kWordBeforeResultValue = 0x458B4906;
   3246   // Only the inline check flag is supported on X64.
   3247   ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck());
   3248   int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0;
   3249 
   3250   // Get the object - go slow case if it's a smi.
   3251   Label slow;
   3252   StackArgumentsAccessor args(rsp, 2 + extra_argument_offset,
   3253                               ARGUMENTS_DONT_CONTAIN_RECEIVER);
   3254   __ movq(rax, args.GetArgumentOperand(0));
   3255   __ JumpIfSmi(rax, &slow);
   3256 
   3257   // Check that the left hand side is a JS object. Leave its map in rax.
   3258   __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
   3259   __ j(below, &slow);
   3260   __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
   3261   __ j(above, &slow);
   3262 
   3263   // Get the prototype of the function.
   3264   __ movq(rdx, args.GetArgumentOperand(1));
   3265   // rdx is function, rax is map.
   3266 
   3267   // If there is a call site cache, don't look in the global cache, but do
   3268   // the real lookup and update the call site cache.
   3269   if (!HasCallSiteInlineCheck()) {
   3270     // Look up the function and the map in the instanceof cache.
   3271     Label miss;
   3272     __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
   3273     __ j(not_equal, &miss, Label::kNear);
   3274     __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
   3275     __ j(not_equal, &miss, Label::kNear);
   3276     __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
   3277     __ ret(2 * kPointerSize);
   3278     __ bind(&miss);
   3279   }
   3280 
   3281   __ TryGetFunctionPrototype(rdx, rbx, &slow, true);
   3282 
   3283   // Check that the function prototype is a JS object.
   3284   __ JumpIfSmi(rbx, &slow);
   3285   __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, kScratchRegister);
   3286   __ j(below, &slow);
   3287   __ CmpInstanceType(kScratchRegister, LAST_SPEC_OBJECT_TYPE);
   3288   __ j(above, &slow);
   3289 
   3290   // Register mapping:
   3291   //   rax is object map.
   3292   //   rdx is function.
   3293   //   rbx is function prototype.
   3294   if (!HasCallSiteInlineCheck()) {
   3295     __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
   3296     __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
   3297   } else {
   3298     // Get return address and delta to inlined map check.
   3299     __ movq(kScratchRegister, StackOperandForReturnAddress(0));
   3300     __ subq(kScratchRegister, args.GetArgumentOperand(2));
   3301     if (FLAG_debug_code) {
   3302       __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
   3303       __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
   3304       __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck);
   3305     }
   3306     __ movq(kScratchRegister,
   3307             Operand(kScratchRegister, kOffsetToMapCheckValue));
   3308     __ movq(Operand(kScratchRegister, 0), rax);
   3309   }
   3310 
   3311   __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
   3312 
   3313   // Loop through the prototype chain looking for the function prototype.
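          // Illustrative JavaScript sketch of the loop below:
          //   for (var p = Object.getPrototypeOf(obj); p !== null;
          //        p = Object.getPrototypeOf(p)) {
          //     if (p === fn.prototype) return true;
          //   }
          //   return false;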
   3314   Label loop, is_instance, is_not_instance;
   3315   __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
   3316   __ bind(&loop);
   3317   __ cmpq(rcx, rbx);
   3318   __ j(equal, &is_instance, Label::kNear);
   3319   __ cmpq(rcx, kScratchRegister);
   3320   // The code at is_not_instance assumes that kScratchRegister contains a
   3321   // non-zero GCable value (the null object in this case).
   3322   __ j(equal, &is_not_instance, Label::kNear);
   3323   __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
   3324   __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset));
   3325   __ jmp(&loop);
   3326 
   3327   __ bind(&is_instance);
   3328   if (!HasCallSiteInlineCheck()) {
   3329     __ xorl(rax, rax);
   3330     // Store bitwise zero in the cache.  This is a Smi in GC terms.
   3331     STATIC_ASSERT(kSmiTag == 0);
   3332     __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
   3333   } else {
   3334     // Store offset of true in the root array at the inline check site.
   3335     int true_offset = 0x100 +
   3336         (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
   3337     // Assert it fits in one byte.
   3338     ASSERT(true_offset >= 0 && true_offset < 0x100);
   3339     __ movl(rax, Immediate(true_offset));
   3340     __ movq(kScratchRegister, StackOperandForReturnAddress(0));
   3341     __ subq(kScratchRegister, args.GetArgumentOperand(2));
   3342     __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
   3343     if (FLAG_debug_code) {
   3344       __ movl(rax, Immediate(kWordBeforeResultValue));
   3345       __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
   3346       __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
   3347     }
   3348     __ Set(rax, 0);
   3349   }
   3350   __ ret((2 + extra_argument_offset) * kPointerSize);
   3351 
   3352   __ bind(&is_not_instance);
   3353   if (!HasCallSiteInlineCheck()) {
   3354     // We have to store a non-zero value in the cache.
   3355     __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
   3356   } else {
   3357     // Store offset of false in the root array at the inline check site.
   3358     int false_offset = 0x100 +
   3359         (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
   3360     // Assert it fits in one byte.
   3361     ASSERT(false_offset >= 0 && false_offset < 0x100);
   3362     __ movl(rax, Immediate(false_offset));
   3363     __ movq(kScratchRegister, StackOperandForReturnAddress(0));
   3364     __ subq(kScratchRegister, args.GetArgumentOperand(2));
   3365     __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
   3366     if (FLAG_debug_code) {
   3367       __ movl(rax, Immediate(kWordBeforeResultValue));
   3368       __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
   3369       __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
   3370     }
   3371   }
   3372   __ ret((2 + extra_argument_offset) * kPointerSize);
   3373 
   3374   // Slow-case: Go through the JavaScript implementation.
   3375   __ bind(&slow);
   3376   if (HasCallSiteInlineCheck()) {
   3377     // Remove extra value from the stack.
   3378     __ PopReturnAddressTo(rcx);
   3379     __ pop(rax);
   3380     __ PushReturnAddressFrom(rcx);
   3381   }
   3382   __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
   3383 }
   3384 
   3385 
   3386 // Passing arguments in registers is not supported.
   3387 Register InstanceofStub::left() { return no_reg; }
   3388 
   3389 
   3390 Register InstanceofStub::right() { return no_reg; }
   3391 
   3392 
   3393 // -------------------------------------------------------------------------
   3394 // StringCharCodeAtGenerator
   3395 
   3396 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   3397   Label flat_string;
   3398   Label ascii_string;
   3399   Label got_char_code;
   3400   Label sliced_string;
   3401 
   3402   // If the receiver is a smi, trigger the non-string case.
   3403   __ JumpIfSmi(object_, receiver_not_string_);
   3404 
   3405   // Fetch the instance type of the receiver into result register.
   3406   __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
   3407   __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
   3408   // If the receiver is not a string, trigger the non-string case.
   3409   __ testb(result_, Immediate(kIsNotStringMask));
   3410   __ j(not_zero, receiver_not_string_);
   3411 
   3412   // If the index is non-smi, trigger the non-smi case.
   3413   __ JumpIfNotSmi(index_, &index_not_smi_);
   3414   __ bind(&got_smi_index_);
   3415 
   3416   // Check for index out of range.
   3417   __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
   3418   __ j(above_equal, index_out_of_range_);
   3419 
   3420   __ SmiToInteger32(index_, index_);
   3421 
   3422   StringCharLoadGenerator::Generate(
   3423       masm, object_, index_, result_, &call_runtime_);
   3424 
   3425   __ Integer32ToSmi(result_, result_);
   3426   __ bind(&exit_);
   3427 }
   3428 
   3429 
   3430 void StringCharCodeAtGenerator::GenerateSlow(
   3431     MacroAssembler* masm,
   3432     const RuntimeCallHelper& call_helper) {
   3433   __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
   3434 
   3435   Factory* factory = masm->isolate()->factory();
   3436   // Index is not a smi.
   3437   __ bind(&index_not_smi_);
   3438   // If index is a heap number, try converting it to an integer.
   3439   __ CheckMap(index_,
   3440               factory->heap_number_map(),
   3441               index_not_number_,
   3442               DONT_DO_SMI_CHECK);
   3443   call_helper.BeforeCall(masm);
   3444   __ push(object_);
   3445   __ push(index_);  // Consumed by runtime conversion function.
   3446   if (index_flags_ == STRING_INDEX_IS_NUMBER) {
   3447     __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
   3448   } else {
   3449     ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
   3450     // NumberToSmi discards numbers that are not exact integers.
   3451     __ CallRuntime(Runtime::kNumberToSmi, 1);
   3452   }
   3453   if (!index_.is(rax)) {
   3454     // Save the conversion result before the pop instructions below
   3455     // have a chance to overwrite it.
   3456     __ movq(index_, rax);
   3457   }
   3458   __ pop(object_);
   3459   // Reload the instance type.
   3460   __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
   3461   __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
   3462   call_helper.AfterCall(masm);
   3463   // If index is still not a smi, it must be out of range.
   3464   __ JumpIfNotSmi(index_, index_out_of_range_);
   3465   // Otherwise, return to the fast path.
   3466   __ jmp(&got_smi_index_);
   3467 
   3468   // Call runtime. We get here when the receiver is a string and the
   3469   // index is a number, but the code of getting the actual character
   3470   // is too complex (e.g., when the string needs to be flattened).
   3471   __ bind(&call_runtime_);
   3472   call_helper.BeforeCall(masm);
   3473   __ push(object_);
   3474   __ Integer32ToSmi(index_, index_);
   3475   __ push(index_);
   3476   __ CallRuntime(Runtime::kStringCharCodeAt, 2);
   3477   if (!result_.is(rax)) {
   3478     __ movq(result_, rax);
   3479   }
   3480   call_helper.AfterCall(masm);
   3481   __ jmp(&exit_);
   3482 
   3483   __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
   3484 }
   3485 
   3486 
   3487 // -------------------------------------------------------------------------
   3488 // StringCharFromCodeGenerator
   3489 
   3490 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
   3491   // Fast case of Heap::LookupSingleCharacterStringFromCode.
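          // Sketch: return single_character_string_cache[code], bailing out to
          // the slow case when code is not a smi, exceeds kMaxOneByteCharCode,
          // or the cache entry is still undefined.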
   3492   __ JumpIfNotSmi(code_, &slow_case_);
   3493   __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
   3494   __ j(above, &slow_case_);
   3495 
   3496   __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
   3497   SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
   3498   __ movq(result_, FieldOperand(result_, index.reg, index.scale,
   3499                                 FixedArray::kHeaderSize));
   3500   __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
   3501   __ j(equal, &slow_case_);
   3502   __ bind(&exit_);
   3503 }
   3504 
   3505 
   3506 void StringCharFromCodeGenerator::GenerateSlow(
   3507     MacroAssembler* masm,
   3508     const RuntimeCallHelper& call_helper) {
   3509   __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
   3510 
   3511   __ bind(&slow_case_);
   3512   call_helper.BeforeCall(masm);
   3513   __ push(code_);
   3514   __ CallRuntime(Runtime::kCharFromCode, 1);
   3515   if (!result_.is(rax)) {
   3516     __ movq(result_, rax);
   3517   }
   3518   call_helper.AfterCall(masm);
   3519   __ jmp(&exit_);
   3520 
   3521   __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
   3522 }
   3523 
   3524 
   3525 void StringAddStub::Generate(MacroAssembler* masm) {
   3526   Label call_runtime, call_builtin;
   3527   Builtins::JavaScript builtin_id = Builtins::ADD;
   3528 
   3529   // Load the two arguments.
   3530   StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
   3531   __ movq(rax, args.GetArgumentOperand(0));  // First argument (left).
   3532   __ movq(rdx, args.GetArgumentOperand(1));  // Second argument (right).
   3533 
   3534   // Make sure that both arguments are strings if not known in advance.
   3535   // Otherwise, at least one of the arguments is definitely a string,
   3536   // and we convert the one that is not known to be a string.
   3537   if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
   3538     ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT);
   3539     ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT);
   3540     __ JumpIfSmi(rax, &call_runtime);
   3541     __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
   3542     __ j(above_equal, &call_runtime);
   3543 
   3544     // First argument is a string; test the second.
   3545     __ JumpIfSmi(rdx, &call_runtime);
   3546     __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
   3547     __ j(above_equal, &call_runtime);
   3548   } else if ((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
   3549     ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == 0);
   3550     GenerateConvertArgument(masm, 2 * kPointerSize, rax, rbx, rcx, rdi,
   3551                             &call_builtin);
   3552     builtin_id = Builtins::STRING_ADD_RIGHT;
   3553   } else if ((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
   3554     ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == 0);
   3555     GenerateConvertArgument(masm, 1 * kPointerSize, rdx, rbx, rcx, rdi,
   3556                             &call_builtin);
   3557     builtin_id = Builtins::STRING_ADD_LEFT;
   3558   }
   3559 
   3560   // Both arguments are strings.
   3561   // rax: first string
   3562   // rdx: second string
   3563   // Check if either of the strings is empty. In that case return the other.
   3564   Label second_not_zero_length, both_not_zero_length;
   3565   __ movq(rcx, FieldOperand(rdx, String::kLengthOffset));
   3566   __ SmiTest(rcx);
   3567   __ j(not_zero, &second_not_zero_length, Label::kNear);
   3568   // Second string is empty, result is first string which is already in rax.
   3569   Counters* counters = masm->isolate()->counters();
   3570   __ IncrementCounter(counters->string_add_native(), 1);
   3571   __ ret(2 * kPointerSize);
   3572   __ bind(&second_not_zero_length);
   3573   __ movq(rbx, FieldOperand(rax, String::kLengthOffset));
   3574   __ SmiTest(rbx);
   3575   __ j(not_zero, &both_not_zero_length, Label::kNear);
   3576   // First string is empty, result is second string which is in rdx.
   3577   __ movq(rax, rdx);
   3578   __ IncrementCounter(counters->string_add_native(), 1);
   3579   __ ret(2 * kPointerSize);
   3580 
   3581   // Both strings are non-empty.
   3582   // rax: first string
   3583   // rbx: length of first string
   3584   // rcx: length of second string
   3585   // rdx: second string
   3586   // r8: map of first string (if flags_ == STRING_ADD_CHECK_BOTH)
   3587   // r9: map of second string (if flags_ == STRING_ADD_CHECK_BOTH)
   3588   Label string_add_flat_result, longer_than_two;
   3589   __ bind(&both_not_zero_length);
   3590 
   3591   // If the arguments were known to be strings, their maps were not loaded
   3592   // into r8 and r9 by the code above.
   3593   if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) {
   3594     __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset));
   3595     __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
   3596   }
   3597   // Get the instance types of the two strings as they will be needed soon.
   3598   __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset));
   3599   __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset));
   3600 
   3601   // Look at the length of the result of adding the two strings.
   3602   STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
   3603   __ SmiAdd(rbx, rbx, rcx);
   3604   // Use the string table when adding two one-character strings, as it
   3605   // helps later optimizations to return an internalized string here.
   3606   __ SmiCompare(rbx, Smi::FromInt(2));
   3607   __ j(not_equal, &longer_than_two);
   3608 
   3609   // Check that both strings are non-external ASCII strings.
   3610   __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx,
   3611                                                   &call_runtime);
   3612 
   3613   // Get the two characters forming the new string.
   3614   __ movzxbq(rbx, FieldOperand(rax, SeqOneByteString::kHeaderSize));
   3615   __ movzxbq(rcx, FieldOperand(rdx, SeqOneByteString::kHeaderSize));
   3616 
   3617   // Try to look up the two-character string in the string table. If it is
   3618   // not found, just allocate a new one.
   3619   Label make_two_character_string, make_flat_ascii_string;
   3620   StringHelper::GenerateTwoCharacterStringTableProbe(
   3621       masm, rbx, rcx, r14, r11, rdi, r15, &make_two_character_string);
   3622   __ IncrementCounter(counters->string_add_native(), 1);
   3623   __ ret(2 * kPointerSize);
   3624 
   3625   __ bind(&make_two_character_string);
   3626   __ Set(rdi, 2);
   3627   __ AllocateAsciiString(rax, rdi, r8, r9, r11, &call_runtime);
   3628   // rbx - first byte: first character
   3629   // rbx - second byte: *maybe* second character
   3630   // Make sure that the second byte of rbx contains the second character.
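          // Pack the pair little-endian, rbx = first | (second << 8), so the
          // single 16-bit write below stores both characters at once.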
   3631   __ movzxbq(rcx, FieldOperand(rdx, SeqOneByteString::kHeaderSize));
   3632   __ shll(rcx, Immediate(kBitsPerByte));
   3633   __ orl(rbx, rcx);
   3634   // Write both characters to the new string.
   3635   __ movw(FieldOperand(rax, SeqOneByteString::kHeaderSize), rbx);
   3636   __ IncrementCounter(counters->string_add_native(), 1);
   3637   __ ret(2 * kPointerSize);
   3638 
   3639   __ bind(&longer_than_two);
   3640   // Check if resulting string will be flat.
   3641   __ SmiCompare(rbx, Smi::FromInt(ConsString::kMinLength));
   3642   __ j(below, &string_add_flat_result);
   3643   // Handle exceptionally long strings in the runtime system.
   3644   STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
   3645   __ SmiCompare(rbx, Smi::FromInt(String::kMaxLength));
   3646   __ j(above, &call_runtime);
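          // Length-based dispatch so far (sketch):
          //   length < ConsString::kMinLength -> build a flat string below
          //   length <= String::kMaxLength    -> build a ConsString here
          //   otherwise                       -> handled in the runtime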
   3647 
   3648   // If the result is not supposed to be flat, allocate a cons string object.
   3649   // If both strings are ASCII, the result is an ASCII cons string.
   3650   // rax: first string
   3651   // rbx: length of resulting flat string
   3652   // rdx: second string
   3653   // r8: instance type of first string
   3654   // r9: instance type of second string
   3655   Label non_ascii, allocated, ascii_data;
   3656   __ movl(rcx, r8);
   3657   __ and_(rcx, r9);
   3658   STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
   3659   STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
   3660   __ testl(rcx, Immediate(kStringEncodingMask));
   3661   __ j(zero, &non_ascii);
   3662   __ bind(&ascii_data);
   3663   // Allocate an ASCII cons string.
   3664   __ AllocateAsciiConsString(rcx, rdi, no_reg, &call_runtime);
   3665   __ bind(&allocated);
   3666   // Fill the fields of the cons string.
   3667   __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
   3668   __ movq(FieldOperand(rcx, ConsString::kHashFieldOffset),
   3669           Immediate(String::kEmptyHashField));
   3670 
   3671   Label skip_write_barrier, after_writing;
   3672   ExternalReference high_promotion_mode = ExternalReference::
   3673       new_space_high_promotion_mode_active_address(masm->isolate());
   3674   __ Load(rbx, high_promotion_mode);
   3675   __ testb(rbx, Immediate(1));
   3676   __ j(zero, &skip_write_barrier);
   3677 
   3678   __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
   3679   __ RecordWriteField(rcx,
   3680                       ConsString::kFirstOffset,
   3681                       rax,
   3682                       rbx,
   3683                       kDontSaveFPRegs);
   3684   __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
   3685   __ RecordWriteField(rcx,
   3686                       ConsString::kSecondOffset,
   3687                       rdx,
   3688                       rbx,
   3689                       kDontSaveFPRegs);
   3690   __ jmp(&after_writing);
   3691 
   3692   __ bind(&skip_write_barrier);
   3693   __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
   3694   __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
   3695 
   3696   __ bind(&after_writing);
   3697 
   3698   __ movq(rax, rcx);
   3699   __ IncrementCounter(counters->string_add_native(), 1);
   3700   __ ret(2 * kPointerSize);
   3701   __ bind(&non_ascii);
   3702   // At least one of the strings is two-byte. Check whether it happens
   3703   // to contain only one-byte characters.
   3704   // rcx: first instance type AND second instance type.
   3705   // r8: first instance type.
   3706   // r9: second instance type.
   3707   __ testb(rcx, Immediate(kOneByteDataHintMask));
   3708   __ j(not_zero, &ascii_data);
   3709   __ xor_(r8, r9);
   3710   STATIC_ASSERT(kOneByteStringTag != 0 && kOneByteDataHintTag != 0);
   3711   __ andb(r8, Immediate(kOneByteStringTag | kOneByteDataHintTag));
   3712   __ cmpb(r8, Immediate(kOneByteStringTag | kOneByteDataHintTag));
   3713   __ j(equal, &ascii_data);
   3714   // Allocate a two byte cons string.
   3715   __ AllocateTwoByteConsString(rcx, rdi, no_reg, &call_runtime);
   3716   __ jmp(&allocated);
   3717 
   3718   // We cannot encounter sliced strings or cons strings here since:
   3719   STATIC_ASSERT(SlicedString::kMinLength >= ConsString::kMinLength);
   3720   // Handle creating a flat result from either external or sequential strings.
   3721   // Locate the first character of each string.
   3722   // rax: first string
   3723   // rbx: length of resulting flat string as smi
   3724   // rdx: second string
   3725   // r8: instance type of first string
   3726   // r9: instance type of second string
   3727   Label first_prepared, second_prepared;
   3728   Label first_is_sequential, second_is_sequential;
   3729   __ bind(&string_add_flat_result);
   3730 
   3731   __ SmiToInteger32(r14, FieldOperand(rax, SeqString::kLengthOffset));
   3732   // r14: length of first string
   3733   STATIC_ASSERT(kSeqStringTag == 0);
   3734   __ testb(r8, Immediate(kStringRepresentationMask));
   3735   __ j(zero, &first_is_sequential, Label::kNear);
   3736   // Rule out short external string and load string resource.
   3737   STATIC_ASSERT(kShortExternalStringTag != 0);
   3738   __ testb(r8, Immediate(kShortExternalStringMask));
   3739   __ j(not_zero, &call_runtime);
   3740   __ movq(rcx, FieldOperand(rax, ExternalString::kResourceDataOffset));
   3741   __ jmp(&first_prepared, Label::kNear);
   3742   __ bind(&first_is_sequential);
   3743   STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
   3744   __ lea(rcx, FieldOperand(rax, SeqOneByteString::kHeaderSize));
   3745   __ bind(&first_prepared);
   3746 
3747   // Check whether both strings have the same encoding.
   3748   __ xorl(r8, r9);
   3749   __ testb(r8, Immediate(kStringEncodingMask));
   3750   __ j(not_zero, &call_runtime);
   3751 
   3752   __ SmiToInteger32(r15, FieldOperand(rdx, SeqString::kLengthOffset));
   3753   // r15: length of second string
   3754   STATIC_ASSERT(kSeqStringTag == 0);
   3755   __ testb(r9, Immediate(kStringRepresentationMask));
   3756   __ j(zero, &second_is_sequential, Label::kNear);
   3757   // Rule out short external string and load string resource.
   3758   STATIC_ASSERT(kShortExternalStringTag != 0);
   3759   __ testb(r9, Immediate(kShortExternalStringMask));
   3760   __ j(not_zero, &call_runtime);
   3761   __ movq(rdx, FieldOperand(rdx, ExternalString::kResourceDataOffset));
   3762   __ jmp(&second_prepared, Label::kNear);
   3763   __ bind(&second_is_sequential);
   3764   STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
   3765   __ lea(rdx, FieldOperand(rdx, SeqOneByteString::kHeaderSize));
   3766   __ bind(&second_prepared);
   3767 
   3768   Label non_ascii_string_add_flat_result;
   3769   // r9: instance type of second string
   3770   // First string and second string have the same encoding.
   3771   STATIC_ASSERT(kTwoByteStringTag == 0);
   3772   __ SmiToInteger32(rbx, rbx);
   3773   __ testb(r9, Immediate(kStringEncodingMask));
   3774   __ j(zero, &non_ascii_string_add_flat_result);
   3775 
   3776   __ bind(&make_flat_ascii_string);
   3777   // Both strings are ASCII strings. As they are short they are both flat.
   3778   __ AllocateAsciiString(rax, rbx, rdi, r8, r9, &call_runtime);
   3779   // rax: result string
   3780   // Locate first character of result.
   3781   __ lea(rbx, FieldOperand(rax, SeqOneByteString::kHeaderSize));
   3782   // rcx: first char of first string
   3783   // rbx: first character of result
   3784   // r14: length of first string
   3785   StringHelper::GenerateCopyCharacters(masm, rbx, rcx, r14, true);
   3786   // rbx: next character of result
   3787   // rdx: first char of second string
   3788   // r15: length of second string
   3789   StringHelper::GenerateCopyCharacters(masm, rbx, rdx, r15, true);
   3790   __ IncrementCounter(counters->string_add_native(), 1);
   3791   __ ret(2 * kPointerSize);
   3792 
   3793   __ bind(&non_ascii_string_add_flat_result);
3794   // Both strings are two-byte strings. As they are short they are both flat.
   3795   __ AllocateTwoByteString(rax, rbx, rdi, r8, r9, &call_runtime);
   3796   // rax: result string
   3797   // Locate first character of result.
   3798   __ lea(rbx, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
   3799   // rcx: first char of first string
   3800   // rbx: first character of result
   3801   // r14: length of first string
   3802   StringHelper::GenerateCopyCharacters(masm, rbx, rcx, r14, false);
   3803   // rbx: next character of result
   3804   // rdx: first char of second string
   3805   // r15: length of second string
   3806   StringHelper::GenerateCopyCharacters(masm, rbx, rdx, r15, false);
   3807   __ IncrementCounter(counters->string_add_native(), 1);
   3808   __ ret(2 * kPointerSize);
   3809 
   3810   // Just jump to runtime to add the two strings.
   3811   __ bind(&call_runtime);
   3812   __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
   3813 
   3814   if (call_builtin.is_linked()) {
   3815     __ bind(&call_builtin);
   3816     __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
   3817   }
   3818 }
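
// Two of the instance-type tests above, restated in C++ for reference. This
// is an illustrative sketch, not code the stub calls; the mask constants are
// the real ones used above, the function names are hypothetical:
//
//   bool SameEncoding(uint8_t first_type, uint8_t second_type) {
//     // A flat result is only possible when both strings are one-byte or
//     // both are two-byte.
//     return ((first_type ^ second_type) & kStringEncodingMask) == 0;
//   }
//
//   bool BothHaveOneByteDataHint(uint8_t first_type, uint8_t second_type) {
//     // Two-byte strings may carry a hint that they contain only one-byte
//     // data; if both operands do, an ASCII cons string still works.
//     return ((first_type & second_type) & kOneByteDataHintMask) != 0;
//   }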
   3819 
   3820 
   3821 void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
   3822   __ push(rax);
   3823   __ push(rdx);
   3824 }
   3825 
   3826 
   3827 void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm,
   3828                                             Register temp) {
   3829   __ PopReturnAddressTo(temp);
   3830   __ pop(rdx);
   3831   __ pop(rax);
   3832   __ PushReturnAddressFrom(temp);
   3833 }
   3834 
   3835 
   3836 void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
   3837                                             int stack_offset,
   3838                                             Register arg,
   3839                                             Register scratch1,
   3840                                             Register scratch2,
   3841                                             Register scratch3,
   3842                                             Label* slow) {
   3843   // First check if the argument is already a string.
   3844   Label not_string, done;
   3845   __ JumpIfSmi(arg, &not_string);
   3846   __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
   3847   __ j(below, &done);
   3848 
   3849   // Check the number to string cache.
   3850   __ bind(&not_string);
   3851   // Puts the cached result into scratch1.
   3852   __ LookupNumberStringCache(arg, scratch1, scratch2, scratch3, slow);
   3853   __ movq(arg, scratch1);
   3854   __ movq(Operand(rsp, stack_offset), arg);
   3855   __ bind(&done);
   3856 }
   3857 
   3858 
   3859 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
   3860                                           Register dest,
   3861                                           Register src,
   3862                                           Register count,
   3863                                           bool ascii) {
   3864   Label loop;
   3865   __ bind(&loop);
   3866   // This loop just copies one character at a time, as it is only used for very
   3867   // short strings.
   3868   if (ascii) {
   3869     __ movb(kScratchRegister, Operand(src, 0));
   3870     __ movb(Operand(dest, 0), kScratchRegister);
   3871     __ incq(src);
   3872     __ incq(dest);
   3873   } else {
   3874     __ movzxwl(kScratchRegister, Operand(src, 0));
   3875     __ movw(Operand(dest, 0), kScratchRegister);
   3876     __ addq(src, Immediate(2));
   3877     __ addq(dest, Immediate(2));
   3878   }
   3879   __ decl(count);
   3880   __ j(not_zero, &loop);
   3881 }
   3882 
   3883 
   3884 void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
   3885                                              Register dest,
   3886                                              Register src,
   3887                                              Register count,
   3888                                              bool ascii) {
3889   // Copy characters using rep movs of quadwords. Copy the remaining bytes
3890   // individually after running rep movs, since the byte count need not be
3891   // a multiple of the quadword size.
3892   // Count is a positive int32; dest and src are character pointers.
   3893   ASSERT(dest.is(rdi));  // rep movs destination
   3894   ASSERT(src.is(rsi));  // rep movs source
   3895   ASSERT(count.is(rcx));  // rep movs count
   3896 
   3897   // Nothing to do for zero characters.
   3898   Label done;
   3899   __ testl(count, count);
   3900   __ j(zero, &done, Label::kNear);
   3901 
   3902   // Make count the number of bytes to copy.
   3903   if (!ascii) {
   3904     STATIC_ASSERT(2 == sizeof(uc16));
   3905     __ addl(count, count);
   3906   }
   3907 
3908   // Don't enter the rep movs if there are fewer than kPointerSize bytes to copy.
   3909   Label last_bytes;
   3910   __ testl(count, Immediate(~(kPointerSize - 1)));
   3911   __ j(zero, &last_bytes, Label::kNear);
   3912 
3913   // Copy from rsi to rdi using the rep movs instruction.
   3914   __ movl(kScratchRegister, count);
3915   __ shr(count, Immediate(kPointerSizeLog2));  // Number of quadwords to copy.
   3916   __ repmovsq();
   3917 
   3918   // Find number of bytes left.
   3919   __ movl(count, kScratchRegister);
   3920   __ and_(count, Immediate(kPointerSize - 1));
   3921 
   3922   // Check if there are more bytes to copy.
   3923   __ bind(&last_bytes);
   3924   __ testl(count, count);
   3925   __ j(zero, &done, Label::kNear);
   3926 
   3927   // Copy remaining characters.
   3928   Label loop;
   3929   __ bind(&loop);
   3930   __ movb(kScratchRegister, Operand(src, 0));
   3931   __ movb(Operand(dest, 0), kScratchRegister);
   3932   __ incq(src);
   3933   __ incq(dest);
   3934   __ decl(count);
   3935   __ j(not_zero, &loop);
   3936 
   3937   __ bind(&done);
   3938 }
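
// A minimal C++ sketch of the copy strategy emitted above (illustrative
// only; the stub emits the equivalent instructions rather than calling
// anything like this, the function name is hypothetical, and memcpy comes
// from <string.h>):
//
//   void CopyBytes(uint8_t* dest, const uint8_t* src, uint32_t count) {
//     // Bulk phase: whole quadwords at a time, as rep movsq does.
//     uint32_t quads = count >> kPointerSizeLog2;
//     while (quads-- > 0) {
//       memcpy(dest, src, kPointerSize);  // one 8-byte move
//       dest += kPointerSize;
//       src += kPointerSize;
//     }
//     // Tail phase: the remaining count & (kPointerSize - 1) bytes.
//     uint32_t tail = count & (kPointerSize - 1);
//     while (tail-- > 0) *dest++ = *src++;
//   }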
   3939 
   3940 void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm,
   3941                                                         Register c1,
   3942                                                         Register c2,
   3943                                                         Register scratch1,
   3944                                                         Register scratch2,
   3945                                                         Register scratch3,
   3946                                                         Register scratch4,
   3947                                                         Label* not_found) {
   3948   // Register scratch3 is the general scratch register in this function.
   3949   Register scratch = scratch3;
   3950 
3951   // Make sure that both characters are not digits, as such strings have a
3952   // different hash algorithm. Don't try to look for these in the string table.
   3953   Label not_array_index;
   3954   __ leal(scratch, Operand(c1, -'0'));
   3955   __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
   3956   __ j(above, &not_array_index, Label::kNear);
   3957   __ leal(scratch, Operand(c2, -'0'));
   3958   __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
   3959   __ j(below_equal, not_found);
   3960 
   3961   __ bind(&not_array_index);
   3962   // Calculate the two character string hash.
   3963   Register hash = scratch1;
   3964   GenerateHashInit(masm, hash, c1, scratch);
   3965   GenerateHashAddCharacter(masm, hash, c2, scratch);
   3966   GenerateHashGetHash(masm, hash, scratch);
   3967 
   3968   // Collect the two characters in a register.
   3969   Register chars = c1;
   3970   __ shl(c2, Immediate(kBitsPerByte));
   3971   __ orl(chars, c2);
   3972 
   3973   // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
   3974   // hash:  hash of two character string.
   3975 
   3976   // Load the string table.
   3977   Register string_table = c2;
   3978   __ LoadRoot(string_table, Heap::kStringTableRootIndex);
   3979 
   3980   // Calculate capacity mask from the string table capacity.
   3981   Register mask = scratch2;
   3982   __ SmiToInteger32(mask,
   3983                     FieldOperand(string_table, StringTable::kCapacityOffset));
   3984   __ decl(mask);
   3985 
   3986   Register map = scratch4;
   3987 
   3988   // Registers
   3989   // chars:        two character string, char 1 in byte 0 and char 2 in byte 1.
   3990   // hash:         hash of two character string (32-bit int)
   3991   // string_table: string table
   3992   // mask:         capacity mask (32-bit int)
   3993   // map:          -
   3994   // scratch:      -
   3995 
   3996   // Perform a number of probes in the string table.
   3997   static const int kProbes = 4;
   3998   Label found_in_string_table;
   3999   Label next_probe[kProbes];
   4000   Register candidate = scratch;  // Scratch register contains candidate.
   4001   for (int i = 0; i < kProbes; i++) {
   4002     // Calculate entry in string table.
   4003     __ movl(scratch, hash);
   4004     if (i > 0) {
   4005       __ addl(scratch, Immediate(StringTable::GetProbeOffset(i)));
   4006     }
   4007     __ andl(scratch, mask);
   4008 
   4009     // Load the entry from the string table.
   4010     STATIC_ASSERT(StringTable::kEntrySize == 1);
   4011     __ movq(candidate,
   4012             FieldOperand(string_table,
   4013                          scratch,
   4014                          times_pointer_size,
   4015                          StringTable::kElementsStartOffset));
   4016 
4017     // If the entry is undefined, no string with this hash can be found.
   4018     Label is_string;
   4019     __ CmpObjectType(candidate, ODDBALL_TYPE, map);
   4020     __ j(not_equal, &is_string, Label::kNear);
   4021 
   4022     __ CompareRoot(candidate, Heap::kUndefinedValueRootIndex);
   4023     __ j(equal, not_found);
   4024     // Must be the hole (deleted entry).
   4025     if (FLAG_debug_code) {
   4026       __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
   4027       __ cmpq(kScratchRegister, candidate);
   4028       __ Assert(equal, kOddballInStringTableIsNotUndefinedOrTheHole);
   4029     }
   4030     __ jmp(&next_probe[i]);
   4031 
   4032     __ bind(&is_string);
   4033 
   4034     // If length is not 2 the string is not a candidate.
   4035     __ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
   4036                   Smi::FromInt(2));
   4037     __ j(not_equal, &next_probe[i]);
   4038 
4039     // We use kScratchRegister as a temporary register on the assumption
4040     // that JumpIfInstanceTypeIsNotSequentialAscii does not use it implicitly.
   4041     Register temp = kScratchRegister;
   4042 
   4043     // Check that the candidate is a non-external ASCII string.
   4044     __ movzxbl(temp, FieldOperand(map, Map::kInstanceTypeOffset));
   4045     __ JumpIfInstanceTypeIsNotSequentialAscii(
   4046         temp, temp, &next_probe[i]);
   4047 
   4048     // Check if the two characters match.
   4049     __ movl(temp, FieldOperand(candidate, SeqOneByteString::kHeaderSize));
   4050     __ andl(temp, Immediate(0x0000ffff));
   4051     __ cmpl(chars, temp);
   4052     __ j(equal, &found_in_string_table);
   4053     __ bind(&next_probe[i]);
   4054   }
   4055 
   4056   // No matching 2 character string found by probing.
   4057   __ jmp(not_found);
   4058 
   4059   // Scratch register contains result when we fall through to here.
   4060   Register result = candidate;
   4061   __ bind(&found_in_string_table);
   4062   if (!result.is(rax)) {
   4063     __ movq(rax, result);
   4064   }
   4065 }
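
// Shape of the lookup above, as a sketch (the sentinels and helper names
// are hypothetical stand-ins for the root values and checks used above):
//
//   Object* LookupTwoCharString(uint8_t c1, uint8_t c2) {
//     if (IsDigit(c1) && IsDigit(c2)) return not_found;  // array indices
//     uint32_t hash = HashTwoChars(c1, c2);   // the three helpers below
//     uint16_t chars = c1 | (c2 << kBitsPerByte);  // c1 in byte 0
//     for (int i = 0; i < kProbes; i++) {
//       Object* candidate = table[(hash + ProbeOffset(i)) & mask];
//       if (candidate == undefined_sentinel) return not_found;
//       if (candidate == hole_sentinel) continue;  // deleted entry
//       if (IsSequentialOneByte(candidate) && Length(candidate) == 2 &&
//           FirstTwoChars(candidate) == chars) {
//         return candidate;
//       }
//     }
//     return not_found;  // caller falls back to creating the string
//   }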
   4066 
   4067 
   4068 void StringHelper::GenerateHashInit(MacroAssembler* masm,
   4069                                     Register hash,
   4070                                     Register character,
   4071                                     Register scratch) {
   4072   // hash = (seed + character) + ((seed + character) << 10);
   4073   __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
   4074   __ SmiToInteger32(scratch, scratch);
   4075   __ addl(scratch, character);
   4076   __ movl(hash, scratch);
   4077   __ shll(scratch, Immediate(10));
   4078   __ addl(hash, scratch);
   4079   // hash ^= hash >> 6;
   4080   __ movl(scratch, hash);
   4081   __ shrl(scratch, Immediate(6));
   4082   __ xorl(hash, scratch);
   4083 }
   4084 
   4085 
   4086 void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
   4087                                             Register hash,
   4088                                             Register character,
   4089                                             Register scratch) {
   4090   // hash += character;
   4091   __ addl(hash, character);
   4092   // hash += hash << 10;
   4093   __ movl(scratch, hash);
   4094   __ shll(scratch, Immediate(10));
   4095   __ addl(hash, scratch);
   4096   // hash ^= hash >> 6;
   4097   __ movl(scratch, hash);
   4098   __ shrl(scratch, Immediate(6));
   4099   __ xorl(hash, scratch);
   4100 }
   4101 
   4102 
   4103 void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
   4104                                        Register hash,
   4105                                        Register scratch) {
   4106   // hash += hash << 3;
   4107   __ leal(hash, Operand(hash, hash, times_8, 0));
   4108   // hash ^= hash >> 11;
   4109   __ movl(scratch, hash);
   4110   __ shrl(scratch, Immediate(11));
   4111   __ xorl(hash, scratch);
   4112   // hash += hash << 15;
   4113   __ movl(scratch, hash);
   4114   __ shll(scratch, Immediate(15));
   4115   __ addl(hash, scratch);
   4116 
   4117   __ andl(hash, Immediate(String::kHashBitMask));
   4118 
   4119   // if (hash == 0) hash = 27;
   4120   Label hash_not_zero;
   4121   __ j(not_zero, &hash_not_zero);
   4122   __ Set(hash, StringHasher::kZeroHash);
   4123   __ bind(&hash_not_zero);
   4124 }
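
// Taken together, the three helpers above compute the following running
// hash (a sketch in plain C++; StringHasher is the authoritative
// implementation):
//
//   uint32_t hash = seed + first_char;        // GenerateHashInit
//   hash += hash << 10;
//   hash ^= hash >> 6;
//   // For each further character c:          // GenerateHashAddCharacter
//   hash += c;
//   hash += hash << 10;
//   hash ^= hash >> 6;
//   // Finalization:                          // GenerateHashGetHash
//   hash += hash << 3;
//   hash ^= hash >> 11;
//   hash += hash << 15;
//   hash &= String::kHashBitMask;
//   if (hash == 0) hash = StringHasher::kZeroHash;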
   4125 
   4126 
   4127 void SubStringStub::Generate(MacroAssembler* masm) {
   4128   Label runtime;
   4129 
   4130   // Stack frame on entry.
   4131   //  rsp[0]  : return address
   4132   //  rsp[8]  : to
   4133   //  rsp[16] : from
   4134   //  rsp[24] : string
   4135 
   4136   enum SubStringStubArgumentIndices {
   4137     STRING_ARGUMENT_INDEX,
   4138     FROM_ARGUMENT_INDEX,
   4139     TO_ARGUMENT_INDEX,
   4140     SUB_STRING_ARGUMENT_COUNT
   4141   };
   4142 
   4143   StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
   4144                               ARGUMENTS_DONT_CONTAIN_RECEIVER);
   4145 
   4146   // Make sure first argument is a string.
   4147   __ movq(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
   4148   STATIC_ASSERT(kSmiTag == 0);
   4149   __ testl(rax, Immediate(kSmiTagMask));
   4150   __ j(zero, &runtime);
   4151   Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
   4152   __ j(NegateCondition(is_string), &runtime);
   4153 
   4154   // rax: string
   4155   // rbx: instance type
   4156   // Calculate length of sub string using the smi values.
   4157   __ movq(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
   4158   __ movq(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
   4159   __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
   4160 
   4161   __ SmiSub(rcx, rcx, rdx);  // Overflow doesn't happen.
   4162   __ cmpq(rcx, FieldOperand(rax, String::kLengthOffset));
   4163   Label not_original_string;
   4164   // Shorter than original string's length: an actual substring.
   4165   __ j(below, &not_original_string, Label::kNear);
   4166   // Longer than original string's length or negative: unsafe arguments.
   4167   __ j(above, &runtime);
   4168   // Return original string.
   4169   Counters* counters = masm->isolate()->counters();
   4170   __ IncrementCounter(counters->sub_string_native(), 1);
   4171   __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
   4172   __ bind(&not_original_string);
   4173 
   4174   Label single_char;
   4175   __ SmiCompare(rcx, Smi::FromInt(1));
   4176   __ j(equal, &single_char);
   4177 
   4178   __ SmiToInteger32(rcx, rcx);
   4179 
   4180   // rax: string
   4181   // rbx: instance type
   4182   // rcx: sub string length
   4183   // rdx: from index (smi)
   4184   // Deal with different string types: update the index if necessary
4185   // and put the underlying string into rdi.
   4186   Label underlying_unpacked, sliced_string, seq_or_external_string;
   4187   // If the string is not indirect, it can only be sequential or external.
   4188   STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
   4189   STATIC_ASSERT(kIsIndirectStringMask != 0);
   4190   __ testb(rbx, Immediate(kIsIndirectStringMask));
   4191   __ j(zero, &seq_or_external_string, Label::kNear);
   4192 
   4193   __ testb(rbx, Immediate(kSlicedNotConsMask));
   4194   __ j(not_zero, &sliced_string, Label::kNear);
   4195   // Cons string.  Check whether it is flat, then fetch first part.
   4196   // Flat cons strings have an empty second part.
   4197   __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
   4198                  Heap::kempty_stringRootIndex);
   4199   __ j(not_equal, &runtime);
   4200   __ movq(rdi, FieldOperand(rax, ConsString::kFirstOffset));
   4201   // Update instance type.
   4202   __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
   4203   __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
   4204   __ jmp(&underlying_unpacked, Label::kNear);
   4205 
   4206   __ bind(&sliced_string);
   4207   // Sliced string.  Fetch parent and correct start index by offset.
   4208   __ addq(rdx, FieldOperand(rax, SlicedString::kOffsetOffset));
   4209   __ movq(rdi, FieldOperand(rax, SlicedString::kParentOffset));
   4210   // Update instance type.
   4211   __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
   4212   __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
   4213   __ jmp(&underlying_unpacked, Label::kNear);
   4214 
   4215   __ bind(&seq_or_external_string);
   4216   // Sequential or external string.  Just move string to the correct register.
   4217   __ movq(rdi, rax);
   4218 
   4219   __ bind(&underlying_unpacked);
   4220 
   4221   if (FLAG_string_slices) {
   4222     Label copy_routine;
   4223     // rdi: underlying subject string
   4224     // rbx: instance type of underlying subject string
   4225     // rdx: adjusted start index (smi)
   4226     // rcx: length
   4227     // If coming from the make_two_character_string path, the string
4228     // is too short to be sliced anyway.
   4229     __ cmpq(rcx, Immediate(SlicedString::kMinLength));
   4230     // Short slice.  Copy instead of slicing.
   4231     __ j(less, &copy_routine);
   4232     // Allocate new sliced string.  At this point we do not reload the instance
   4233     // type including the string encoding because we simply rely on the info
   4234     // provided by the original string.  It does not matter if the original
   4235     // string's encoding is wrong because we always have to recheck encoding of
4236     // the newly created string's parent anyway due to externalized strings.
   4237     Label two_byte_slice, set_slice_header;
   4238     STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
   4239     STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
   4240     __ testb(rbx, Immediate(kStringEncodingMask));
   4241     __ j(zero, &two_byte_slice, Label::kNear);
   4242     __ AllocateAsciiSlicedString(rax, rbx, r14, &runtime);
   4243     __ jmp(&set_slice_header, Label::kNear);
   4244     __ bind(&two_byte_slice);
   4245     __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
   4246     __ bind(&set_slice_header);
   4247     __ Integer32ToSmi(rcx, rcx);
   4248     __ movq(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
   4249     __ movq(FieldOperand(rax, SlicedString::kHashFieldOffset),
   4250            Immediate(String::kEmptyHashField));
   4251     __ movq(FieldOperand(rax, SlicedString::kParentOffset), rdi);
   4252     __ movq(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
   4253     __ IncrementCounter(counters->sub_string_native(), 1);
   4254     __ ret(3 * kPointerSize);
   4255 
   4256     __ bind(&copy_routine);
   4257   }
   4258 
   4259   // rdi: underlying subject string
   4260   // rbx: instance type of underlying subject string
   4261   // rdx: adjusted start index (smi)
   4262   // rcx: length
4263   // The subject string can only be an external or a sequential string of
4264   // either encoding at this point.
   4265   Label two_byte_sequential, sequential_string;
   4266   STATIC_ASSERT(kExternalStringTag != 0);
   4267   STATIC_ASSERT(kSeqStringTag == 0);
   4268   __ testb(rbx, Immediate(kExternalStringTag));
   4269   __ j(zero, &sequential_string);
   4270 
   4271   // Handle external string.
   4272   // Rule out short external strings.
4273   STATIC_ASSERT(kShortExternalStringTag != 0);
   4274   __ testb(rbx, Immediate(kShortExternalStringMask));
   4275   __ j(not_zero, &runtime);
   4276   __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
   4277   // Move the pointer so that offset-wise, it looks like a sequential string.
   4278   STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
   4279   __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
   4280 
   4281   __ bind(&sequential_string);
   4282   STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
   4283   __ testb(rbx, Immediate(kStringEncodingMask));
   4284   __ j(zero, &two_byte_sequential);
   4285 
   4286   // Allocate the result.
   4287   __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime);
   4288 
   4289   // rax: result string
   4290   // rcx: result string length
4291   __ movq(r14, rsi);  // rsi used by following code.
   4292   {  // Locate character of sub string start.
   4293     SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
   4294     __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
   4295                         SeqOneByteString::kHeaderSize - kHeapObjectTag));
   4296   }
   4297   // Locate first character of result.
   4298   __ lea(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize));
   4299 
   4300   // rax: result string
   4301   // rcx: result length
   4302   // rdi: first character of result
   4303   // rsi: character of sub string start
   4304   // r14: original value of rsi
   4305   StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true);
   4306   __ movq(rsi, r14);  // Restore rsi.
   4307   __ IncrementCounter(counters->sub_string_native(), 1);
   4308   __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
   4309 
   4310   __ bind(&two_byte_sequential);
   4311   // Allocate the result.
   4312   __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);
   4313 
   4314   // rax: result string
   4315   // rcx: result string length
4316   __ movq(r14, rsi);  // rsi used by following code.
   4317   {  // Locate character of sub string start.
   4318     SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
   4319     __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
   4320                         SeqOneByteString::kHeaderSize - kHeapObjectTag));
   4321   }
   4322   // Locate first character of result.
   4323   __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
   4324 
   4325   // rax: result string
   4326   // rcx: result length
   4327   // rdi: first character of result
   4328   // rsi: character of sub string start
   4329   // r14: original value of rsi
   4330   StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false);
4331   __ movq(rsi, r14);  // Restore rsi.
   4332   __ IncrementCounter(counters->sub_string_native(), 1);
   4333   __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
   4334 
   4335   // Just jump to runtime to create the sub string.
   4336   __ bind(&runtime);
   4337   __ TailCallRuntime(Runtime::kSubString, 3, 1);
   4338 
   4339   __ bind(&single_char);
   4340   // rax: string
   4341   // rbx: instance type
   4342   // rcx: sub string length (smi)
   4343   // rdx: from index (smi)
   4344   StringCharAtGenerator generator(
   4345       rax, rdx, rcx, rax, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER);
   4346   generator.GenerateFast(masm);
   4347   __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
   4348   generator.SkipSlow(masm, &runtime);
   4349 }
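
// Shape of the fast paths above, as a sketch (the helper names are
// hypothetical; the constants and the ordering of the checks come from the
// code above):
//
//   String* SubString(String* string, int from, int to) {
//     int length = to - from;
//     if (length == string->length()) return string;   // whole string
//     if (length == 1) return CharAt(string, from);    // single character
//     Unwrap(&string, &from);  // follow flat cons/sliced to the parent
//     if (FLAG_string_slices && length >= SlicedString::kMinLength) {
//       return NewSlicedString(string, from, length);  // header only
//     }
//     return CopyCharacters(string, from, length);     // short: flat copy
//   }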
   4350 
   4351 
   4352 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
   4353                                                       Register left,
   4354                                                       Register right,
   4355                                                       Register scratch1,
   4356                                                       Register scratch2) {
   4357   Register length = scratch1;
   4358 
   4359   // Compare lengths.
   4360   Label check_zero_length;
   4361   __ movq(length, FieldOperand(left, String::kLengthOffset));
   4362   __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
   4363   __ j(equal, &check_zero_length, Label::kNear);
   4364   __ Move(rax, Smi::FromInt(NOT_EQUAL));
   4365   __ ret(0);
   4366 
   4367   // Check if the length is zero.
   4368   Label compare_chars;
   4369   __ bind(&check_zero_length);
   4370   STATIC_ASSERT(kSmiTag == 0);
   4371   __ SmiTest(length);
   4372   __ j(not_zero, &compare_chars, Label::kNear);
   4373   __ Move(rax, Smi::FromInt(EQUAL));
   4374   __ ret(0);
   4375 
   4376   // Compare characters.
   4377   __ bind(&compare_chars);
   4378   Label strings_not_equal;
   4379   GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
   4380                                 &strings_not_equal, Label::kNear);
   4381 
   4382   // Characters are equal.
   4383   __ Move(rax, Smi::FromInt(EQUAL));
   4384   __ ret(0);
   4385 
   4386   // Characters are not equal.
   4387   __ bind(&strings_not_equal);
   4388   __ Move(rax, Smi::FromInt(NOT_EQUAL));
   4389   __ ret(0);
   4390 }
   4391 
   4392 
   4393 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
   4394                                                         Register left,
   4395                                                         Register right,
   4396                                                         Register scratch1,
   4397                                                         Register scratch2,
   4398                                                         Register scratch3,
   4399                                                         Register scratch4) {
   4400   // Ensure that you can always subtract a string length from a non-negative
   4401   // number (e.g. another length).
   4402   STATIC_ASSERT(String::kMaxLength < 0x7fffffff);
   4403 
   4404   // Find minimum length and length difference.
   4405   __ movq(scratch1, FieldOperand(left, String::kLengthOffset));
   4406   __ movq(scratch4, scratch1);
   4407   __ SmiSub(scratch4,
   4408             scratch4,
   4409             FieldOperand(right, String::kLengthOffset));
   4410   // Register scratch4 now holds left.length - right.length.
   4411   const Register length_difference = scratch4;
   4412   Label left_shorter;
   4413   __ j(less, &left_shorter, Label::kNear);
4414   // The right string isn't longer than the left one.
   4415   // Get the right string's length by subtracting the (non-negative) difference
   4416   // from the left string's length.
   4417   __ SmiSub(scratch1, scratch1, length_difference);
   4418   __ bind(&left_shorter);
   4419   // Register scratch1 now holds Min(left.length, right.length).
   4420   const Register min_length = scratch1;
   4421 
   4422   Label compare_lengths;
   4423   // If min-length is zero, go directly to comparing lengths.
   4424   __ SmiTest(min_length);
   4425   __ j(zero, &compare_lengths, Label::kNear);
   4426 
   4427   // Compare loop.
   4428   Label result_not_equal;
   4429   GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
   4430                                 &result_not_equal, Label::kNear);
   4431 
   4432   // Completed loop without finding different characters.
   4433   // Compare lengths (precomputed).
   4434   __ bind(&compare_lengths);
   4435   __ SmiTest(length_difference);
   4436   Label length_not_equal;
   4437   __ j(not_zero, &length_not_equal, Label::kNear);
   4438 
   4439   // Result is EQUAL.
   4440   __ Move(rax, Smi::FromInt(EQUAL));
   4441   __ ret(0);
   4442 
   4443   Label result_greater;
   4444   Label result_less;
   4445   __ bind(&length_not_equal);
   4446   __ j(greater, &result_greater, Label::kNear);
   4447   __ jmp(&result_less, Label::kNear);
   4448   __ bind(&result_not_equal);
   4449   // Unequal comparison of left to right, either character or length.
   4450   __ j(above, &result_greater, Label::kNear);
   4451   __ bind(&result_less);
   4452 
   4453   // Result is LESS.
   4454   __ Move(rax, Smi::FromInt(LESS));
   4455   __ ret(0);
   4456 
   4457   // Result is GREATER.
   4458   __ bind(&result_greater);
   4459   __ Move(rax, Smi::FromInt(GREATER));
   4460   __ ret(0);
   4461 }
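
// The comparison above is plain lexicographic ordering; a C++ sketch
// (illustrative only, the function name is hypothetical):
//
//   int CompareFlatAscii(const uint8_t* left, int left_length,
//                        const uint8_t* right, int right_length) {
//     int min_length =
//         left_length < right_length ? left_length : right_length;
//     for (int i = 0; i < min_length; i++) {
//       if (left[i] != right[i]) {
//         return left[i] < right[i] ? LESS : GREATER;
//       }
//     }
//     // All shared characters are equal: the length difference decides.
//     if (left_length == right_length) return EQUAL;
//     return left_length < right_length ? LESS : GREATER;
//   }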
   4462 
   4463 
   4464 void StringCompareStub::GenerateAsciiCharsCompareLoop(
   4465     MacroAssembler* masm,
   4466     Register left,
   4467     Register right,
   4468     Register length,
   4469     Register scratch,
   4470     Label* chars_not_equal,
   4471     Label::Distance near_jump) {
4472   // Change index to run from -length to -1 by adding length to the string
4473   // start. This means that the loop ends when the index reaches zero, so
4474   // no additional compare is needed.
   4475   __ SmiToInteger32(length, length);
   4476   __ lea(left,
   4477          FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
   4478   __ lea(right,
   4479          FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
   4480   __ neg(length);
   4481   Register index = length;  // index = -length;
   4482 
   4483   // Compare loop.
   4484   Label loop;
   4485   __ bind(&loop);
   4486   __ movb(scratch, Operand(left, index, times_1, 0));
   4487   __ cmpb(scratch, Operand(right, index, times_1, 0));
   4488   __ j(not_equal, chars_not_equal, near_jump);
   4489   __ incq(index);
   4490   __ j(not_zero, &loop);
   4491 }
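
// The negative-index trick above in C++ terms (a sketch; the names are
// hypothetical):
//
//   bool CharsEqual(const uint8_t* left, const uint8_t* right, int length) {
//     const uint8_t* left_end = left + length;
//     const uint8_t* right_end = right + length;
//     // The index runs from -length up to 0, so the increment's zero flag
//     // doubles as the loop condition and no extra compare is needed.
//     for (int i = -length; i != 0; i++) {
//       if (left_end[i] != right_end[i]) return false;
//     }
//     return true;
//   }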
   4492 
   4493 
   4494 void StringCompareStub::Generate(MacroAssembler* masm) {
   4495   Label runtime;
   4496 
   4497   // Stack frame on entry.
   4498   //  rsp[0]  : return address
   4499   //  rsp[8]  : right string
   4500   //  rsp[16] : left string
   4501 
   4502   StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
   4503   __ movq(rdx, args.GetArgumentOperand(0));  // left
   4504   __ movq(rax, args.GetArgumentOperand(1));  // right
   4505 
   4506   // Check for identity.
   4507   Label not_same;
   4508   __ cmpq(rdx, rax);
   4509   __ j(not_equal, &not_same, Label::kNear);
   4510   __ Move(rax, Smi::FromInt(EQUAL));
   4511   Counters* counters = masm->isolate()->counters();
   4512   __ IncrementCounter(counters->string_compare_native(), 1);
   4513   __ ret(2 * kPointerSize);
   4514 
   4515   __ bind(&not_same);
   4516 
   4517   // Check that both are sequential ASCII strings.
   4518   __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);
   4519 
   4520   // Inline comparison of ASCII strings.
   4521   __ IncrementCounter(counters->string_compare_native(), 1);
   4522   // Drop arguments from the stack
   4523   __ PopReturnAddressTo(rcx);
   4524   __ addq(rsp, Immediate(2 * kPointerSize));
   4525   __ PushReturnAddressFrom(rcx);
   4526   GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);
   4527 
   4528   // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
   4529   // tagged as a small integer.
   4530   __ bind(&runtime);
   4531   __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
   4532 }
   4533 
   4534 
   4535 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
   4536   ASSERT(state_ == CompareIC::SMI);
   4537   Label miss;
   4538   __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);
   4539 
   4540   if (GetCondition() == equal) {
   4541     // For equality we do not care about the sign of the result.
   4542     __ subq(rax, rdx);
   4543   } else {
   4544     Label done;
   4545     __ subq(rdx, rax);
   4546     __ j(no_overflow, &done, Label::kNear);
   4547     // Correct sign of result in case of overflow.
   4548     __ not_(rdx);
   4549     __ bind(&done);
   4550     __ movq(rax, rdx);
   4551   }
   4552   __ ret(0);
   4553 
   4554   __ bind(&miss);
   4555   GenerateMiss(masm);
   4556 }
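
// Why the not_ above repairs an overflowed subtraction: the caller only
// consumes the sign (and zeroness) of the result, and overflow means the
// computed sign is the opposite of the true one. A sketch using a
// GCC/Clang builtin (illustrative; sound for the smi range used here):
//
//   int64_t SmiCompare(int64_t left, int64_t right) {
//     int64_t diff;
//     if (__builtin_sub_overflow(left, right, &diff)) {
//       diff = ~diff;  // flip the sign bit back; the magnitude and the
//                      // off-by-one of ~ versus - do not matter
//     }
//     return diff;     // negative, zero, or positive
//   }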
   4557 
   4558 
   4559 void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
   4560   ASSERT(state_ == CompareIC::NUMBER);
   4561 
   4562   Label generic_stub;
   4563   Label unordered, maybe_undefined1, maybe_undefined2;
   4564   Label miss;
   4565 
   4566   if (left_ == CompareIC::SMI) {
   4567     __ JumpIfNotSmi(rdx, &miss);
   4568   }
   4569   if (right_ == CompareIC::SMI) {
   4570     __ JumpIfNotSmi(rax, &miss);
   4571   }
   4572 
4573   // Load the left and right operands.
   4574   Label done, left, left_smi, right_smi;
   4575   __ JumpIfSmi(rax, &right_smi, Label::kNear);
   4576   __ CompareMap(rax, masm->isolate()->factory()->heap_number_map());
   4577   __ j(not_equal, &maybe_undefined1, Label::kNear);
   4578   __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
   4579   __ jmp(&left, Label::kNear);
   4580   __ bind(&right_smi);
   4581   __ SmiToInteger32(rcx, rax);  // Can't clobber rax yet.
   4582   __ Cvtlsi2sd(xmm1, rcx);
   4583 
   4584   __ bind(&left);
   4585   __ JumpIfSmi(rdx, &left_smi, Label::kNear);
   4586   __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map());
   4587   __ j(not_equal, &maybe_undefined2, Label::kNear);
   4588   __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
   4589   __ jmp(&done);
   4590   __ bind(&left_smi);
   4591   __ SmiToInteger32(rcx, rdx);  // Can't clobber rdx yet.
   4592   __ Cvtlsi2sd(xmm0, rcx);
   4593 
   4594   __ bind(&done);
   4595   // Compare operands
   4596   __ ucomisd(xmm0, xmm1);
   4597 
   4598   // Don't base result on EFLAGS when a NaN is involved.
   4599   __ j(parity_even, &unordered, Label::kNear);
   4600 
   4601   // Return a result of -1, 0, or 1, based on EFLAGS.
4602   // Zero with mov rather than xor, because xor would clobber EFLAGS.
   4603   __ movl(rax, Immediate(0));
   4604   __ movl(rcx, Immediate(0));
   4605   __ setcc(above, rax);  // Add one to zero if carry clear and not equal.
   4606   __ sbbq(rax, rcx);  // Subtract one if below (aka. carry set).
   4607   __ ret(0);
   4608 
   4609   __ bind(&unordered);
   4610   __ bind(&generic_stub);
   4611   ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
   4612                      CompareIC::GENERIC);
   4613   __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
   4614 
   4615   __ bind(&maybe_undefined1);
   4616   if (Token::IsOrderedRelationalCompareOp(op_)) {
   4617     __ Cmp(rax, masm->isolate()->factory()->undefined_value());
   4618     __ j(not_equal, &miss);
   4619     __ JumpIfSmi(rdx, &unordered);
   4620     __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
   4621     __ j(not_equal, &maybe_undefined2, Label::kNear);
   4622     __ jmp(&unordered);
   4623   }
   4624 
   4625   __ bind(&maybe_undefined2);
   4626   if (Token::IsOrderedRelationalCompareOp(op_)) {
   4627     __ Cmp(rdx, masm->isolate()->factory()->undefined_value());
   4628     __ j(equal, &unordered);
   4629   }
   4630 
   4631   __ bind(&miss);
   4632   GenerateMiss(masm);
   4633 }
   4634 
   4635 
   4636 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
   4637   ASSERT(state_ == CompareIC::INTERNALIZED_STRING);
   4638   ASSERT(GetCondition() == equal);
   4639 
   4640   // Registers containing left and right operands respectively.
   4641   Register left = rdx;
   4642   Register right = rax;
   4643   Register tmp1 = rcx;
   4644   Register tmp2 = rbx;
   4645 
   4646   // Check that both operands are heap objects.
   4647   Label miss;
   4648   Condition cond = masm->CheckEitherSmi(left, right, tmp1);
   4649   __ j(cond, &miss, Label::kNear);
   4650 
   4651   // Check that both operands are internalized strings.
   4652   __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset));
   4653   __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset));
   4654   __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   4655   __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   4656   STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
   4657   __ or_(tmp1, tmp2);
   4658   __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
   4659   __ j(not_zero, &miss, Label::kNear);
   4660 
   4661   // Internalized strings are compared by identity.
   4662   Label done;
   4663   __ cmpq(left, right);
   4664   // Make sure rax is non-zero. At this point input operands are
   4665   // guaranteed to be non-zero.
   4666   ASSERT(right.is(rax));
   4667   __ j(not_equal, &done, Label::kNear);
   4668   STATIC_ASSERT(EQUAL == 0);
   4669   STATIC_ASSERT(kSmiTag == 0);
   4670   __ Move(rax, Smi::FromInt(EQUAL));
   4671   __ bind(&done);
   4672   __ ret(0);
   4673 
   4674   __ bind(&miss);
   4675   GenerateMiss(masm);
   4676 }
   4677 
   4678 
   4679 void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
   4680   ASSERT(state_ == CompareIC::UNIQUE_NAME);
   4681   ASSERT(GetCondition() == equal);
   4682 
   4683   // Registers containing left and right operands respectively.
   4684   Register left = rdx;
   4685   Register right = rax;
   4686   Register tmp1 = rcx;
   4687   Register tmp2 = rbx;
   4688 
   4689   // Check that both operands are heap objects.
   4690   Label miss;
   4691   Condition cond = masm->CheckEitherSmi(left, right, tmp1);
   4692   __ j(cond, &miss, Label::kNear);
   4693 
   4694   // Check that both operands are unique names. This leaves the instance
   4695   // types loaded in tmp1 and tmp2.
   4696   __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset));
   4697   __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset));
   4698   __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   4699   __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   4700 
   4701   __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
   4702   __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);
   4703 
   4704   // Unique names are compared by identity.
   4705   Label done;
   4706   __ cmpq(left, right);
   4707   // Make sure rax is non-zero. At this point input operands are
   4708   // guaranteed to be non-zero.
   4709   ASSERT(right.is(rax));
   4710   __ j(not_equal, &done, Label::kNear);
   4711   STATIC_ASSERT(EQUAL == 0);
   4712   STATIC_ASSERT(kSmiTag == 0);
   4713   __ Move(rax, Smi::FromInt(EQUAL));
   4714   __ bind(&done);
   4715   __ ret(0);
   4716 
   4717   __ bind(&miss);
   4718   GenerateMiss(masm);
   4719 }
   4720 
   4721 
   4722 void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
   4723   ASSERT(state_ == CompareIC::STRING);
   4724   Label miss;
   4725 
   4726   bool equality = Token::IsEqualityOp(op_);
   4727 
   4728   // Registers containing left and right operands respectively.
   4729   Register left = rdx;
   4730   Register right = rax;
   4731   Register tmp1 = rcx;
   4732   Register tmp2 = rbx;
   4733   Register tmp3 = rdi;
   4734 
   4735   // Check that both operands are heap objects.
   4736   Condition cond = masm->CheckEitherSmi(left, right, tmp1);
   4737   __ j(cond, &miss);
   4738 
   4739   // Check that both operands are strings. This leaves the instance
   4740   // types loaded in tmp1 and tmp2.
   4741   __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset));
   4742   __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset));
   4743   __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
   4744   __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
   4745   __ movq(tmp3, tmp1);
   4746   STATIC_ASSERT(kNotStringTag != 0);
   4747   __ or_(tmp3, tmp2);
   4748   __ testb(tmp3, Immediate(kIsNotStringMask));
   4749   __ j(not_zero, &miss);
   4750 
   4751   // Fast check for identical strings.
   4752   Label not_same;
   4753   __ cmpq(left, right);
   4754   __ j(not_equal, &not_same, Label::kNear);
   4755   STATIC_ASSERT(EQUAL == 0);
   4756   STATIC_ASSERT(kSmiTag == 0);
   4757   __ Move(rax, Smi::FromInt(EQUAL));
   4758   __ ret(0);
   4759 
   4760   // Handle not identical strings.
   4761   __ bind(&not_same);
   4762 
   4763   // Check that both strings are internalized strings. If they are, we're done
   4764   // because we already know they are not identical. We also know they are both
   4765   // strings.
   4766   if (equality) {
   4767     Label do_compare;
   4768     STATIC_ASSERT(kInternalizedTag == 0);
   4769     __ or_(tmp1, tmp2);
   4770     __ testb(tmp1, Immediate(kIsNotInternalizedMask));
   4771     __ j(not_zero, &do_compare, Label::kNear);
   4772     // Make sure rax is non-zero. At this point input operands are
   4773     // guaranteed to be non-zero.
   4774     ASSERT(right.is(rax));
   4775     __ ret(0);
   4776     __ bind(&do_compare);
   4777   }
   4778 
   4779   // Check that both strings are sequential ASCII.
   4780   Label runtime;
   4781   __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
   4782 
   4783   // Compare flat ASCII strings. Returns when done.
   4784   if (equality) {
   4785     StringCompareStub::GenerateFlatAsciiStringEquals(
   4786         masm, left, right, tmp1, tmp2);
   4787   } else {
   4788     StringCompareStub::GenerateCompareFlatAsciiStrings(
   4789         masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
   4790   }
   4791 
   4792   // Handle more complex cases in runtime.
   4793   __ bind(&runtime);
   4794   __ PopReturnAddressTo(tmp1);
   4795   __ push(left);
   4796   __ push(right);
   4797   __ PushReturnAddressFrom(tmp1);
   4798   if (equality) {
   4799     __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
   4800   } else {
   4801     __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
   4802   }
   4803 
   4804   __ bind(&miss);
   4805   GenerateMiss(masm);
   4806 }
   4807 
   4808 
   4809 void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
   4810   ASSERT(state_ == CompareIC::OBJECT);
   4811   Label miss;
   4812   Condition either_smi = masm->CheckEitherSmi(rdx, rax);
   4813   __ j(either_smi, &miss, Label::kNear);
   4814 
   4815   __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
   4816   __ j(not_equal, &miss, Label::kNear);
   4817   __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
   4818   __ j(not_equal, &miss, Label::kNear);
   4819 
   4820   ASSERT(GetCondition() == equal);
   4821   __ subq(rax, rdx);
   4822   __ ret(0);
   4823 
   4824   __ bind(&miss);
   4825   GenerateMiss(masm);
   4826 }
   4827 
   4828 
   4829 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
   4830   Label miss;
   4831   Condition either_smi = masm->CheckEitherSmi(rdx, rax);
   4832   __ j(either_smi, &miss, Label::kNear);
   4833 
   4834   __ movq(rcx, FieldOperand(rax, HeapObject::kMapOffset));
   4835   __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
   4836   __ Cmp(rcx, known_map_);
   4837   __ j(not_equal, &miss, Label::kNear);
   4838   __ Cmp(rbx, known_map_);
   4839   __ j(not_equal, &miss, Label::kNear);
   4840 
   4841   __ subq(rax, rdx);
   4842   __ ret(0);
   4843 
   4844   __ bind(&miss);
   4845   GenerateMiss(masm);
   4846 }
   4847 
   4848 
   4849 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
   4850   {
   4851     // Call the runtime system in a fresh internal frame.
   4852     ExternalReference miss =
   4853         ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
   4854 
   4855     FrameScope scope(masm, StackFrame::INTERNAL);
   4856     __ push(rdx);
   4857     __ push(rax);
   4858     __ push(rdx);
   4859     __ push(rax);
   4860     __ Push(Smi::FromInt(op_));
   4861     __ CallExternalReference(miss, 3);
   4862 
   4863     // Compute the entry point of the rewritten stub.
   4864     __ lea(rdi, FieldOperand(rax, Code::kHeaderSize));
   4865     __ pop(rax);
   4866     __ pop(rdx);
   4867   }
   4868 
   4869   // Do a tail call to the rewritten stub.
   4870   __ jmp(rdi);
   4871 }
   4872 
   4873 
   4874 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
   4875                                                       Label* miss,
   4876                                                       Label* done,
   4877                                                       Register properties,
   4878                                                       Handle<Name> name,
   4879                                                       Register r0) {
   4880   ASSERT(name->IsUniqueName());
4881   // If the names of the slots in the range from 1 to kProbes - 1 for the hash
4882   // value are not equal to the name, and the kProbes-th slot is not used (its
4883   // name is the undefined value), the hash table is guaranteed not to contain
4884   // the property. This holds even if some slots represent deleted properties
4885   // (their names are the hole value).
   4886   for (int i = 0; i < kInlinedProbes; i++) {
   4887     // r0 points to properties hash.
   4888     // Compute the masked index: (hash + i + i * i) & mask.
   4889     Register index = r0;
   4890     // Capacity is smi 2^n.
   4891     __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
   4892     __ decl(index);
   4893     __ and_(index,
   4894             Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));
   4895 
   4896     // Scale the index by multiplying by the entry size.
   4897     ASSERT(NameDictionary::kEntrySize == 3);
   4898     __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
   4899 
   4900     Register entity_name = r0;
4901     // Undefined at this position means the name is not in the table.
   4902     ASSERT_EQ(kSmiTagSize, 1);
   4903     __ movq(entity_name, Operand(properties,
   4904                                  index,
   4905                                  times_pointer_size,
   4906                                  kElementsStartOffset - kHeapObjectTag));
   4907     __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
   4908     __ j(equal, done);
   4909 
4910     // Stop if we found the property.
   4911     __ Cmp(entity_name, Handle<Name>(name));
   4912     __ j(equal, miss);
   4913 
   4914     Label good;
   4915     // Check for the hole and skip.
   4916     __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
   4917     __ j(equal, &good, Label::kNear);
   4918 
   4919     // Check if the entry name is not a unique name.
   4920     __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
   4921     __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset),
   4922                            miss);
   4923     __ bind(&good);
   4924   }
   4925 
   4926   NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP);
   4927   __ Push(Handle<Object>(name));
   4928   __ push(Immediate(name->Hash()));
   4929   __ CallStub(&stub);
   4930   __ testq(r0, r0);
   4931   __ j(not_zero, miss);
   4932   __ jmp(done);
   4933 }
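
// The probing scheme above in C++ terms: open addressing over a
// power-of-two table with kEntrySize == 3 pointers per entry. A sketch
// (the container and sentinels are hypothetical; the probe formula and
// entry size come from the code above):
//
//   int FindEntry(Object** elements, uint32_t capacity, uint32_t hash,
//                 Object* undefined_sentinel, Object* name) {
//     uint32_t mask = capacity - 1;  // capacity is a power of two
//     for (uint32_t i = 0; i < capacity; i++) {
//       uint32_t index = (hash + NameDictionary::GetProbeOffset(i)) & mask;
//       Object* key = elements[index * 3];          // kEntrySize == 3
//       if (key == undefined_sentinel) return -1;   // free slot: absent
//       if (key == name) return static_cast<int>(index);  // found
//       // Deleted entries (the hole) and other names: keep probing.
//     }
//     return -1;
//   }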
   4934 
   4935 
   4936 // Probe the name dictionary in the |elements| register. Jump to the
4937 // |done| label if a property with the given name is found, leaving the
   4938 // index into the dictionary in |r1|. Jump to the |miss| label
   4939 // otherwise.
   4940 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
   4941                                                       Label* miss,
   4942                                                       Label* done,
   4943                                                       Register elements,
   4944                                                       Register name,
   4945                                                       Register r0,
   4946                                                       Register r1) {
   4947   ASSERT(!elements.is(r0));
   4948   ASSERT(!elements.is(r1));
   4949   ASSERT(!name.is(r0));
   4950   ASSERT(!name.is(r1));
   4951 
   4952   __ AssertName(name);
   4953 
   4954   __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
   4955   __ decl(r0);
   4956 
   4957   for (int i = 0; i < kInlinedProbes; i++) {
   4958     // Compute the masked index: (hash + i + i * i) & mask.
   4959     __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
   4960     __ shrl(r1, Immediate(Name::kHashShift));
   4961     if (i > 0) {
   4962       __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
   4963     }
   4964     __ and_(r1, r0);
   4965 
   4966     // Scale the index by multiplying by the entry size.
   4967     ASSERT(NameDictionary::kEntrySize == 3);
   4968     __ lea(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3
   4969 
   4970     // Check if the key is identical to the name.
   4971     __ cmpq(name, Operand(elements, r1, times_pointer_size,
   4972                           kElementsStartOffset - kHeapObjectTag));
   4973     __ j(equal, done);
   4974   }
   4975 
   4976   NameDictionaryLookupStub stub(elements, r0, r1, POSITIVE_LOOKUP);
   4977   __ push(name);
   4978   __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
   4979   __ shrl(r0, Immediate(Name::kHashShift));
   4980   __ push(r0);
   4981   __ CallStub(&stub);
   4982 
   4983   __ testq(r0, r0);
   4984   __ j(zero, miss);
   4985   __ jmp(done);
   4986 }
   4987 
   4988 
   4989 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
   4990   // This stub overrides SometimesSetsUpAFrame() to return false.  That means
   4991   // we cannot call anything that could cause a GC from this stub.
   4992   // Stack frame on entry:
   4993   //  rsp[0 * kPointerSize] : return address.
   4994   //  rsp[1 * kPointerSize] : key's hash.
   4995   //  rsp[2 * kPointerSize] : key.
   4996   // Registers:
   4997   //  dictionary_: NameDictionary to probe.
   4998   //  result_: used as scratch.
   4999   //  index_: will hold an index of entry if lookup is successful.
   5000   //          might alias with result_.
   5001   // Returns:
5002   //  result_ is zero if the lookup failed, non-zero otherwise.
   5003 
   5004   Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
   5005 
   5006   Register scratch = result_;
   5007 
   5008   __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset));
   5009   __ decl(scratch);
   5010   __ push(scratch);
   5011 
5012   // If the names of the slots in the range from 1 to kProbes - 1 for the hash
5013   // value are not equal to the name, and the kProbes-th slot is not used (its
5014   // name is the undefined value), the hash table is guaranteed not to contain
5015   // the property. This holds even if some slots represent deleted properties
5016   // (their names are the hole value).
   5017   StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
   5018                               kPointerSize);
   5019   for (int i = kInlinedProbes; i < kTotalProbes; i++) {
   5020     // Compute the masked index: (hash + i + i * i) & mask.
   5021     __ movq(scratch, args.GetArgumentOperand(1));
   5022     if (i > 0) {
   5023       __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
   5024     }
   5025     __ and_(scratch, Operand(rsp, 0));
   5026 
   5027     // Scale the index by multiplying by the entry size.
   5028     ASSERT(NameDictionary::kEntrySize == 3);
   5029     __ lea(index_, Operand(scratch, scratch, times_2, 0));  // index *= 3.
   5030 
5031     // Undefined at this position means the name is not in the table.
   5032     __ movq(scratch, Operand(dictionary_,
   5033                              index_,
   5034                              times_pointer_size,
   5035                              kElementsStartOffset - kHeapObjectTag));
   5036 
   5037     __ Cmp(scratch, masm->isolate()->factory()->undefined_value());
   5038     __ j(equal, &not_in_dictionary);
   5039 
5040     // Stop if we found the property.
   5041     __ cmpq(scratch, args.GetArgumentOperand(0));
   5042     __ j(equal, &in_dictionary);
   5043 
   5044     if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
   5045       // If we hit a key that is not a unique name during negative
5046       // lookup, we have to bail out, as this key might be equal to the
   5047       // key we are looking for.
   5048 
   5049       // Check if the entry name is not a unique name.
   5050       __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
   5051       __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset),
   5052                              &maybe_in_dictionary);
   5053     }
   5054   }
   5055 
   5056   __ bind(&maybe_in_dictionary);
5057   // If we are doing a negative lookup, then probing failure should be
5058   // treated as a lookup success. For a positive lookup, probing failure
5059   // should be treated as a lookup failure.
   5060   if (mode_ == POSITIVE_LOOKUP) {
   5061     __ movq(scratch, Immediate(0));
   5062     __ Drop(1);
   5063     __ ret(2 * kPointerSize);
   5064   }
   5065 
   5066   __ bind(&in_dictionary);
   5067   __ movq(scratch, Immediate(1));
   5068   __ Drop(1);
   5069   __ ret(2 * kPointerSize);
   5070 
   5071   __ bind(&not_in_dictionary);
   5072   __ movq(scratch, Immediate(0));
   5073   __ Drop(1);
   5074   __ ret(2 * kPointerSize);
   5075 }
   5076 
   5077 
   5078 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
   5079     Isolate* isolate) {
   5080   StoreBufferOverflowStub stub1(kDontSaveFPRegs);
   5081   stub1.GetCode(isolate);
   5082   StoreBufferOverflowStub stub2(kSaveFPRegs);
   5083   stub2.GetCode(isolate);
   5084 }
   5085 
   5086 
   5087 bool CodeStub::CanUseFPRegisters() {
   5088   return true;  // Always have SSE2 on x64.
   5089 }
   5090 
   5091 
   5092 // Takes the input in 3 registers: address_ value_ and object_.  A pointer to
   5093 // the value has just been written into the object, now this stub makes sure
   5094 // we keep the GC informed.  The word in the object where the value has been
   5095 // written is in the address register.
   5096 void RecordWriteStub::Generate(MacroAssembler* masm) {
   5097   Label skip_to_incremental_noncompacting;
   5098   Label skip_to_incremental_compacting;
   5099 
   5100   // The first two instructions are generated with labels so as to get the
   5101   // offset fixed up correctly by the bind(Label*) call.  We patch it back and
   5102   // forth between a compare instructions (a nop in this position) and the
   5103   // real branch when we start and stop incremental heap marking.
   5104   // See RecordWriteStub::Patch for details.
   5105   __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
   5106   __ jmp(&skip_to_incremental_compacting, Label::kFar);
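  // The near jump encodes in two bytes and the far jump in five; these match
  // the nop widths patched in at the end of this function.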

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}


void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ movq(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     not_zero,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm, mode);
    regs_.Restore(masm);
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm, mode);
  regs_.Restore(masm);
  __ ret(0);
}


void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
  Register address =
      arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
  ASSERT(!address.is(regs_.object()));
  ASSERT(!address.is(arg_reg_1));
  __ Move(address, regs_.address());
  __ Move(arg_reg_1, regs_.object());
  // TODO(gc) Can we just set address arg2 in the beginning?
  __ Move(arg_reg_2, address);
  __ LoadAddress(arg_reg_3,
                 ExternalReference::isolate_address(masm->isolate()));
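  // The C record-write entry points take (object, slot address, isolate),
  // matching arg_reg_1 through arg_reg_3 as set up above.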
  int argument_count = 3;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count);
  if (mode == INCREMENTAL_COMPACTION) {
    __ CallCFunction(
        ExternalReference::incremental_evacuation_record_write_function(
            masm->isolate()),
        argument_count);
  } else {
    ASSERT(mode == INCREMENTAL);
    __ CallCFunction(
        ExternalReference::incremental_marking_record_write_function(
            masm->isolate()),
        argument_count);
  }
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}


void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label on_black;
  Label need_incremental;
  Label need_incremental_pop_object;

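  // Mask the object address down to its page header, then decrement the
  // page's write-barrier counter; once it goes negative we fall through to
  // informing the incremental marker.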
  __ movq(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ and_(regs_.scratch0(), regs_.object());
  __ movq(regs_.scratch1(),
         Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset));
  __ subq(regs_.scratch1(), Immediate(1));
  __ movq(Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset),
         regs_.scratch1());
  __ j(negative, &need_incremental);

  // Let's look at the color of the object: if it is not black, we don't
  // have to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &on_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&on_black);

  // Get the value from the slot.
  __ movq(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     zero,
                     &need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ push(regs_.object());
  __ EnsureNotWhite(regs_.scratch0(),  // The value.
                    regs_.scratch1(),  // Scratch.
                    regs_.object(),  // Scratch.
                    &need_incremental_pop_object,
                    Label::kNear);
  __ pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}


void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : element value to store
  //  -- rcx     : element index as smi
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : array literal index in function
  //  -- rsp[16] : array literal
  // clobbers rbx, rdx, rdi
  // -----------------------------------

  Label element_done;
  Label double_elements;
  Label smi_element;
  Label slow_elements;
  Label fast_elements;

  // Get the array literal index, the array literal, and its map.
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movq(rdx, args.GetArgumentOperand(1));
  __ movq(rbx, args.GetArgumentOperand(0));
  __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset));

  __ CheckFastElements(rdi, &double_elements);

  // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
  __ JumpIfSmi(rax, &smi_element);
  __ CheckFastSmiElements(rdi, &fast_elements);

  // Storing into the array literal requires an elements transition. Call into
  // the runtime.

  __ bind(&slow_elements);
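  // Reconstruct the five runtime arguments below the return address: the
  // object, the element index (smi), the value, the literals array, and the
  // literal index.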
  __ PopReturnAddressTo(rdi);
  __ push(rbx);
  __ push(rcx);
  __ push(rax);
  __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
  __ push(rdx);
  __ PushReturnAddressFrom(rdi);
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

  // The array literal has an ElementsKind of FAST_*_ELEMENTS, and the value
  // is an object.
  __ bind(&fast_elements);
  __ SmiToInteger32(kScratchRegister, rcx);
  __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
  __ lea(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size,
                           FixedArrayBase::kHeaderSize));
  __ movq(Operand(rcx, 0), rax);
  // Update the write barrier for the array store.
  __ RecordWrite(rbx, rcx, rax,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ ret(0);

  // The array literal has an ElementsKind of FAST_*_SMI_ELEMENTS or
  // FAST_*_ELEMENTS, and the value is a Smi.
  __ bind(&smi_element);
  __ SmiToInteger32(kScratchRegister, rcx);
  __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
  __ movq(FieldOperand(rbx, kScratchRegister, times_pointer_size,
                       FixedArrayBase::kHeaderSize), rax);
  __ ret(0);

  // The array literal has an ElementsKind of FAST_DOUBLE_ELEMENTS.
  __ bind(&double_elements);

  __ movq(r9, FieldOperand(rbx, JSObject::kElementsOffset));
  __ SmiToInteger32(r11, rcx);
  __ StoreNumberToDoubleElements(rax,
                                 r9,
                                 r11,
                                 xmm0,
                                 &slow_elements);
  __ ret(0);
}


void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ movq(rbx, MemOperand(rbp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ PopReturnAddressTo(rcx);
  int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
      ? kPointerSize
      : 0;
  __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
  __ jmp(rcx);  // Return to the IC Miss stub, continuation still on the stack.
}


void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
  __ movq(rdi, rax);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ movq(rax, MemOperand(rbp, parameter_count_offset));
  // The parameter count above includes the receiver for the arguments passed
  // to the deoptimization handler. Subtract the receiver to get the argument
  // count for the call.
  __ subl(rax, Immediate(1));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  ParameterCount argument_count(rax);
  __ InvokeFunction(
      rdi, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub;
    masm->CallStub(&stub);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // This stub can be called from essentially anywhere, so it needs to
  // preserve all registers. The volatile (caller-saved) registers are saved
  // explicitly below; callee-saved registers are preserved by the C calling
  // convention of the hook function.
  const size_t kNumSavedRegisters = 2;
  __ push(arg_reg_1);
  __ push(arg_reg_2);

  // Calculate the original stack pointer and store it in the second arg.
  __ lea(arg_reg_2, Operand(rsp, (kNumSavedRegisters + 1) * kPointerSize));

  // Calculate the function address and store it in the first arg.
  __ movq(arg_reg_1, Operand(rsp, kNumSavedRegisters * kPointerSize));
  __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
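  // The stub call is emitted at the start of the instrumented function (see
  // MaybeCallEntryHook), so backing the return address up over the call
  // instruction yields the function's entry address.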

  // Save the remainder of the volatile registers.
  masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);

  // Call the entry hook function.
  __ movq(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()),
          RelocInfo::NONE64);

  AllowExternalCallThatCantCauseGC scope(masm);

  const int kArgumentCount = 2;
  __ PrepareCallCFunction(kArgumentCount);
  __ CallCFunction(rax, kArgumentCount);

  // Restore volatile regs.
  masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
  __ pop(arg_reg_2);
  __ pop(arg_reg_1);

  __ Ret();
}


template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(GetInitialFastElementsKind(),
           CONTEXT_CHECK_REQUIRED,
           mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      T stub(kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // rbx - type info cell (if mode != DISABLE_ALLOCATION_SITES)
  // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // rax - number of arguments
  // rdi - constructor?
  // rsp[0] - return address
  // rsp[8] - last argument
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());

  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    ASSERT(FAST_SMI_ELEMENTS == 0);
    ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    ASSERT(FAST_ELEMENTS == 2);
    ASSERT(FAST_HOLEY_ELEMENTS == 3);
    ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

    // Is the low bit set? If so, we are holey and that is good.
    __ testb(rdx, Immediate(1));
    __ j(not_zero, &normal_sequence);
  }

  // Look at the first argument.
  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movq(rcx, args.GetArgumentOperand(0));
  __ testq(rcx, rcx);
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(holey_initial,
                                                  CONTEXT_CHECK_REQUIRED,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(initial,
                                            CONTEXT_CHECK_REQUIRED,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry (only if we have an allocation site in the cell).
    __ incl(rdx);
    __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset));
    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
      __ Assert(equal, kExpectedAllocationSiteInCell);
    }

    // Save the resulting elements kind in the type info. We can't just store
    // rdx in the AllocationSite::transition_info field, because the elements
    // kind is restricted to a portion of the field; the upper bits need to be
    // left alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ SmiAddConstant(FieldOperand(rcx, AllocationSite::kTransitionInfoOffset),
                      Smi::FromInt(kFastElementsKindPackedToHoley));
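    // kFastElementsKindPackedToHoley is the distance from a packed kind to
    // its holey counterpart in the kind sequence, so this smi-add flips the
    // recorded kind to holey without disturbing the upper bits.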

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  ElementsKind initial_kind = GetInitialFastElementsKind();
  ElementsKind initial_holey_kind = GetHoleyElementsKind(initial_kind);

  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(kind);
    stub.GetCode(isolate);
    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE ||
        (!FLAG_track_allocation_sites &&
         (kind == initial_kind || kind == initial_holey_kind))) {
      T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES);
      stub1.GetCode(isolate);
    }
  }
}


void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
      isolate);
}


void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
    Isolate* isolate) {
  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things.
    InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
    stubh1.GetCode(isolate);
    InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
    stubh2.GetCode(isolate);
    InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
    stubh3.GetCode(isolate);
  }
}


void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count_ == ANY) {
    Label not_zero_case, not_one_case;
    __ testq(rax, rax);
    __ j(not_zero, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ cmpl(rax, Immediate(1));
    __ j(greater, &not_one_case);
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else if (argument_count_ == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count_ == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count_ == MORE_THAN_ONE) {
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else {
    UNREACHABLE();
  }
}


void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rbx    : type info cell
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.

    // The initial map for the builtin Array function should be a map.
    __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Since kSmiTag == 0, the smi check below catches a NULL pointer as well
    // as a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);

    // We should have either undefined in rbx or a valid cell.
    Label okay_here;
    Handle<Map> cell_map = masm->isolate()->factory()->cell_map();
    __ Cmp(rbx, undefined_sentinel);
    __ j(equal, &okay_here);
    __ Cmp(FieldOperand(rbx, 0), cell_map);
    __ Assert(equal, kExpectedPropertyCellInRegisterRbx);
    __ bind(&okay_here);
  }

  Label no_info;
  // If the type cell is undefined, or contains anything other than an
  // AllocationSite, call an array constructor that doesn't use
  // AllocationSites.
  __ Cmp(rbx, undefined_sentinel);
  __ j(equal, &no_info);
  __ movq(rdx, FieldOperand(rbx, Cell::kValueOffset));
  __ Cmp(FieldOperand(rdx, 0),
         masm->isolate()->factory()->allocation_site_map());
  __ j(not_equal, &no_info);

  // Only look at the elements kind portion of the transition info.
  __ movq(rdx, FieldOperand(rdx, AllocationSite::kTransitionInfoOffset));
  __ SmiToInteger32(rdx, rdx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ and_(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
}


void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ testq(rax, rax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movq(rcx, args.GetArgumentOperand(0));
    __ testq(rcx, rcx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(kind);
  __ TailCallStub(&stubN);
}


void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rbx    : type info cell
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.

    // The initial map for the builtin Array function should be a map.
    __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Since kSmiTag == 0, the smi check below catches a NULL pointer as well
    // as a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into rcx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ movzxbq(rcx, FieldOperand(rcx, Map::kBitField2Offset));
  // Retrieve the elements kind from bit field 2.
  __ and_(rcx, Immediate(Map::kElementsKindMask));
  __ shr(rcx, Immediate(Map::kElementsKindShift));
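  // rcx now holds the map's ElementsKind as a small integer.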

  if (FLAG_debug_code) {
    Label done;
    __ cmpl(rcx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpl(rcx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64