// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/code-factory.h"
#include "src/codegen.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : target
  //  -- rdx                 : new.target
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------
  __ AssertFunction(rdi);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Unconditionally insert the target and new target as extra arguments. They
  // will be used by stack frame iterators when constructing the stack trace.
  const int num_extra_args = 2;
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(rdi);
  __ Push(rdx);
  __ PushReturnAddressFrom(kScratchRegister);

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addp(rax, Immediate(num_extra_args + 1));

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
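  // For example, a call with two arguments enters with rax == 2 and
  // leaves with rax == 5 (2 args + target + new.target + receiver).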


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ movp(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
  __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
  __ jmp(kScratchRegister);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);
    // Function is also the parameter to the runtime call.
    __ Push(rdi);

    __ CallRuntime(function_id, 1);
    __ movp(rbx, rax);

    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  __ leap(rbx, FieldOperand(rbx, Code::kHeaderSize));
  __ jmp(rbx);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
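  // FieldOperand subtracts the heap-object tag, so the lea below yields
  // the untagged address of the code object's first instruction.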
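    // (Smi-tagging keeps the value GC-safe while it lives on the stack
    // across the runtime call.)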


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rsi: context
  //  -- rdi: constructor function
  //  -- rbx: allocation site or undefined
  //  -- rdx: new target
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(rbx);
    __ Push(rsi);
    __ Push(rbx);
    __ Integer32ToSmi(rcx, rax);
    __ Push(rcx);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(rdi);
      __ Push(rdx);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ movp(rbx, rax);
      __ Pop(rdx);
      __ Pop(rdi);

      // ----------- S t a t e -------------
      //  -- rdi: constructor function
      //  -- rbx: newly allocated object
      //  -- rdx: new target
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ SmiToInteger32(rax, Operand(rsp, 0 * kPointerSize));
    }

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(rbx);
      __ Push(rbx);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;
      // If the result is a smi, it is *not* an object in the ECMA sense.
      __ JumpIfSmi(rax, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
      __ j(above_equal, &exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ movp(rax, Operand(rsp, 0));

      // Restore the arguments count and leave the construct frame. The
      // arguments count is stored below the receiver.
      __ bind(&exit);
      __ movp(rbx, Operand(rsp, 1 * kPointerSize));
    } else {
      __ movp(rbx, Operand(rsp, 0));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(rax, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  if (create_implicit_receiver) {
    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->constructed_objects(), 1);
  }
  __ ret(0);
}
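    // rcx counts down from rax - 1 to 0, pushing the caller's argument
    // slots from first to last so they keep their order on the new frame.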
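  // The extra kPointerSize accounts for the receiver slot: in total,
  // rsp += (argc + 1) * kPointerSize.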


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}


void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(rdi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt };


// Clobbers rcx, r11, kScratchRegister; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        IsTagged rax_is_tagged) {
  // rax   : the number of items to be pushed to the stack
  //
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, kScratchRegister);
  // Make r11 the space we need for the array when it is unrolled onto the
  // stack.
  if (rax_is_tagged == kRaxIsSmiTagged) {
    __ PositiveSmiTimesPowerOfTwoToInteger64(r11, rax, kPointerSizeLog2);
  } else {
    DCHECK(rax_is_tagged == kRaxIsUntaggedInt);
    __ movp(r11, rax);
    __ shlq(r11, Immediate(kPointerSizeLog2));
  }
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, r11);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
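  // The signed comparison also covers an already-overflowed stack, where
  // rcx went negative above and therefore can never exceed r11.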


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Object* new_target
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, and
    // registers rax and rbx hold the argument count and argument array,
    // while rdi holds the function pointer, rsi the context, and rdx the
    // new.target.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : new_target
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and the receiver onto the stack.
    __ Push(rdx);
    __ Push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C arguments on the stack.
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
    // Load the new.target into rdx.
    __ movp(rdx, rcx);
#else  // _WIN64
    // GCC parameters in:
    // rdi : new_target
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movp(r11, rdi);
    __ movp(rdi, rsi);
    // rdi : function
    // r11 : new_target

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and receiver onto the stack.
    __ Push(rdi);
    __ Push(rdx);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);

    // Load the new.target into rdx.
    __ movp(rdx, r11);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function
    // rdx : new.target

    // Check if we have enough stack space to push all arguments.
    // Expects argument count in rax. Clobbers rcx, r11.
    Generate_CheckStackOverflow(masm, kRaxIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop);

    // Invoke the builtin code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : the value to pass to the generator
  //  -- rbx    : the JSGeneratorObject to resume
  //  -- rdx    : the resume mode (tagged)
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertGeneratorObject(rbx);

  // Store input value into generator object.
  __ movp(FieldOperand(rbx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
  __ RecordWriteField(rbx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx,
                      kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ movp(FieldOperand(rbx, JSGeneratorObject::kResumeModeOffset), rdx);

  // Load suspended function and context.
  __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
  __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  Operand last_step_action_operand = masm->ExternalOperand(last_step_action);
  STATIC_ASSERT(StepFrame > StepIn);
  __ cmpb(last_step_action_operand, Immediate(StepIn));
  __ j(greater_equal, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  Operand debug_suspended_generator_operand =
      masm->ExternalOperand(debug_suspended_generator);
  __ cmpp(rbx, debug_suspended_generator_operand);
  __ j(equal, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Pop return address.
  __ PopReturnAddressTo(rax);

  // Push receiver.
  __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));

  // ----------- S t a t e -------------
  //  -- rax    : return address
  //  -- rbx    : the JSGeneratorObject to resume
  //  -- rdx    : the resume mode (tagged)
  //  -- rdi    : generator function
  //  -- rsi    : generator context
  //  -- rsp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  {
    Label done_loop, loop;
    __ bind(&loop);
    __ subl(rcx, Immediate(1));
    __ j(carry, &done_loop, Label::kNear);
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
  __ j(not_equal, &old_generator);

  // New-style (ignition/turbofan) generator object.
  {
    __ PushReturnAddressFrom(rax);
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ LoadSharedFunctionInfoSpecialField(
        rax, rax, SharedFunctionInfo::kFormalParameterCountOffset);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ movp(rdx, rbx);
    __ jmp(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  }

  // Old-style (full-codegen) generator object.
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PushReturnAddressFrom(rax);  // Return address.
    __ Push(rbp);                   // Caller's frame pointer.
    __ Move(rbp, rsp);
    __ Push(rsi);  // Callee's context.
    __ Push(rdi);  // Callee's JS Function.

    // Restore the operand stack.
    __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
    __ SmiToInteger32(rax, FieldOperand(rsi, FixedArray::kLengthOffset));
    {
      Label done_loop, loop;
      __ Set(rcx, 0);
      __ bind(&loop);
      __ cmpl(rcx, rax);
      __ j(equal, &done_loop, Label::kNear);
      __ Push(
          FieldOperand(rsi, rcx, times_pointer_size, FixedArray::kHeaderSize));
      __ addl(rcx, Immediate(1));
      __ jmp(&loop);
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset),
                Heap::kEmptyFixedArrayRootIndex);

    // Restore context.
    __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));

    // Resume the generator function at the continuation.
    __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movp(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
    __ SmiToInteger64(
        rcx, FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
    __ leap(rdx, FieldOperand(rdx, rcx, times_1, Code::kHeaderSize));
    __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
            Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
    __ movp(rax, rbx);  // Continuation expects generator object in rax.
    __ jmp(rdx);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);
    __ Push(rdx);
    __ Push(rdi);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(rdx);
    __ Pop(rbx);
    __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);
    __ Push(rdx);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(rdx);
    __ Pop(rbx);
    __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);
}
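  // The subl/carry pattern above makes the loop run exactly "formal
  // parameter count" times, which also handles zero parameters.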

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  Register args_count = scratch1;
  Register return_pc = scratch2;

  // Get the arguments + receiver count.
  __ movp(args_count,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movl(args_count,
          FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments.
  __ PopReturnAddressTo(return_pc);
  __ addp(rsp, args_count);
  __ PushReturnAddressFrom(return_pc);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called
//   o rdx: the new target
//   o rsi: our context
//   o rbp: the caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS function.
  __ Push(rdx);  // Callee's new target.

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  DCHECK_EQ(Smi::FromInt(0), DebugInfo::uninitialized());
  __ cmpp(FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset),
          Immediate(0));
  __ j(not_equal, &load_debug_bytecode_array);
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ CompareRoot(kInterpreterBytecodeArrayRegister,
                 Heap::kUndefinedValueRootIndex);
  __ j(equal, &bytecode_array_not_present);
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rax);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load initial bytecode offset.
  __ movp(kInterpreterBytecodeOffsetRegister,
          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi tagged bytecode offset.
  __ Push(kInterpreterBytecodeArrayRegister);
  __ Integer32ToSmi(rcx, kInterpreterBytecodeOffsetRegister);
  __ Push(rcx);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ movp(rdx, rsp);
    __ subp(rdx, rcx);
    __ CompareRoot(rdx, Heap::kRealStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ j(always, &loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ Push(rdx);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ subp(rcx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header, Label::kNear);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Dispatch to the first bytecode handler for the function.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  __ call(rbx);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in rax.
  LeaveInterpreterFrame(masm, rbx, rcx);
  __ ret(0);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  Register debug_info = kInterpreterBytecodeArrayRegister;
  __ movp(debug_info, FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset));
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ jmp(&bytecode_array_loaded);

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ leave();  // Leave the frame so we can tail call.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ movp(FieldOperand(rdi, JSFunction::kCodeEntryOffset), rcx);
  __ RecordWriteCodeEntryField(rdi, rcx, r15);
  __ jmp(rcx);
}
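  // (kHeaderSize - kHeapObjectTag is the offset of the first bytecode
  // relative to the tagged BytecodeArray pointer.)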
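  // For example, a frame size of 3 * kPointerSize yields exactly three
  // pushes of undefined: rcx goes 24 -> 16 -> 8 -> 0 and stops at -8.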
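  // rbx now holds the entry point of the handler for the first bytecode,
  // i.e. dispatch_table[bytecode].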

void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
  // Save the function and context for call to CompileBaseline.
  __ movp(rdi, Operand(rbp, StandardFrameConstants::kFunctionOffset));
  __ movp(kContextRegister,
          Operand(rbp, StandardFrameConstants::kContextOffset));

  // Leave the frame before recompiling for baseline so that we don't count as
  // an activation on the stack.
  LeaveInterpreterFrame(masm, rbx, rcx);

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Push return value.
    __ Push(rax);

    // Push function as argument and compile for baseline.
    __ Push(rdi);
    __ CallRuntime(Runtime::kCompileBaseline);

    // Restore return value.
    __ Pop(rax);
  }
  __ ret(0);
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         bool push_receiver) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------

  // Find the address of the last argument.
  __ movp(rcx, rax);
  if (push_receiver) {
    __ addp(rcx, Immediate(1));  // Add one for receiver.
  }

  __ shlp(rcx, Immediate(kPointerSizeLog2));
  __ negp(rcx);
  __ addp(rcx, rbx);

  // Push the arguments.
  Label loop_header, loop_check;
  __ j(always, &loop_check);
  __ bind(&loop_header);
  __ Push(Operand(rbx, 0));
  __ subp(rbx, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmpp(rbx, rcx);
  __ j(greater, &loop_header, Label::kNear);
}
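  // rcx now points one slot below the last argument; the loop below walks
  // rbx down from the first argument until it reaches rcx.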

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- rdi : the target to call (can be any Object).
  // -----------------------------------

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  Generate_InterpreterPushArgs(masm, true);

  // Call the target.
  __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed.
  __ Push(Immediate(0));

  Generate_InterpreterPushArgs(masm, false);

  // Push return address in preparation for the tail-call.
  __ PushReturnAddressFrom(kScratchRegister);

  // Call the constructor (rax, rdx, rdi passed on).
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ Move(rbx, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));
  __ Push(rbx);

  // Initialize dispatch table register.
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rbx);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  __ jmp(rbx);
}
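  // rbx is now the raw code address inside the interpreter entry trampoline
  // at which execution resumes after a bytecode dispatch returns.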

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = rdi;
  Register map = r8;
  Register index = r9;
  __ movp(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ SmiToInteger32(index, FieldOperand(map, FixedArray::kLengthOffset));
  __ cmpl(index, Immediate(2));
  __ j(less, &gotta_call_runtime);

  // Find literals.
  // r14 : native context
  // r9  : length / index
  // r8  : optimized code map
  // rdx : new target
  // rdi : closure
  Register native_context = r14;
  __ movp(native_context, NativeContextOperand());

  __ bind(&loop_top);
  // Native context match?
  Register temp = r11;
  __ movp(temp, FieldOperand(map, index, times_pointer_size,
                             SharedFunctionInfo::kOffsetToPreviousContext));
  __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
  __ cmpp(temp, native_context);
  __ j(not_equal, &loop_bottom);
  // OSR id set to none?
  __ movp(temp, FieldOperand(map, index, times_pointer_size,
                             SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  __ SmiToInteger32(temp, temp);
  const int bailout_id = BailoutId::None().ToInt();
  __ cmpl(temp, Immediate(bailout_id));
  __ j(not_equal, &loop_bottom);

  // Literals available?
  Label got_literals, maybe_cleared_weakcell;
  __ movp(temp, FieldOperand(map, index, times_pointer_size,
                             SharedFunctionInfo::kOffsetToPreviousLiterals));
  // temp contains either a WeakCell pointing to the literals array or the
  // literals array directly.
  STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
  __ movp(r15, FieldOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(r15, &maybe_cleared_weakcell);
  // r15 is a pointer, therefore temp is a WeakCell pointing to a literals
  // array.
  __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
  __ jmp(&got_literals);

  // r15 is a smi. If it's 0, then we are looking at a cleared WeakCell
  // around the literals array, and we should visit the runtime. If it's > 0,
  // then temp already contains the literals array.
  __ bind(&maybe_cleared_weakcell);
  __ cmpp(r15, Immediate(0));
  __ j(equal, &gotta_call_runtime);

  // Save the literals in the closure.
  __ bind(&got_literals);
  __ movp(FieldOperand(closure, JSFunction::kLiteralsOffset), temp);
  __ movp(r15, index);
  __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r15,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

  // Code available?
  Register entry = rcx;
  __ movp(entry, FieldOperand(map, index, times_pointer_size,
                              SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, r15);

  // Link the closure into the optimized function list.
  // rcx : code entry (entry)
  // r14 : native context
  // rdx : new target
  // rdi : closure
  __ movp(rbx,
          ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ movp(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), rbx);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, rbx, r15,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ movp(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
          closure);
  // Save closure before the write barrier.
  __ movp(rbx, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, r15,
                            kDontSaveFPRegs);
  __ movp(closure, rbx);
  __ jmp(entry);

  __ bind(&loop_bottom);
  __ subl(index, Immediate(SharedFunctionInfo::kEntryLength));
  __ cmpl(index, Immediate(1));
  __ j(greater, &loop_top);

  // We found neither literals nor code.
  __ jmp(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);

  // Last possibility. Check the context free optimized code map entry.
  __ movp(entry, FieldOperand(map, FixedArray::kHeaderSize +
                                       SharedFunctionInfo::kSharedCodeIndex));
  __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
  __ jmp(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  // Is the full code valid?
  __ movp(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ movl(rbx, FieldOperand(entry, Code::kFlagsOffset));
  __ andl(rbx, Immediate(Code::KindField::kMask));
  __ shrl(rbx, Immediate(Code::KindField::kShift));
  __ cmpl(rbx, Immediate(Code::BUILTIN));
  __ j(equal, &gotta_call_runtime);
  // Yes, install the full code.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
  __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, r15);
  __ jmp(entry);

  __ bind(&gotta_call_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
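  // The first map slot holds the context-independent code entry; it is
  // checked separately under maybe_call_runtime below.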

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  __ subp(Operand(rsp, 0), Immediate(5));
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ Popad();
  __ ret(0);
}


#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to point after the code-age stub.
  __ ret(0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ Pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ Popad();
    // Tear down internal frame.
  }

  __ DropUnderReturnAddress(1);  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpp(kScratchRegister,
          Immediate(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
  __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
  __ cmpp(kScratchRegister,
          Immediate(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort(kNoCasesLeft);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

   1233 
   1234 // static
   1235 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
   1236                                                int field_index) {
   1237   // ----------- S t a t e -------------
   1238   //  -- rax    : number of arguments
   1239   //  -- rdi    : function
   1240   //  -- rsi    : context
   1241   //  -- rsp[0] : return address
   1242   //  -- rsp[8] : receiver
   1243   // -----------------------------------
   1244 
   1245   // 1. Load receiver into rax and check that it's actually a JSDate object.
   1246   Label receiver_not_date;
   1247   {
   1248     StackArgumentsAccessor args(rsp, 0);
   1249     __ movp(rax, args.GetReceiverOperand());
   1250     __ JumpIfSmi(rax, &receiver_not_date);
   1251     __ CmpObjectType(rax, JS_DATE_TYPE, rbx);
   1252     __ j(not_equal, &receiver_not_date);
   1253   }
   1254 
   1255   // 2. Load the specified date field, falling back to the runtime as necessary.
   1256   if (field_index == JSDate::kDateValue) {
   1257     __ movp(rax, FieldOperand(rax, JSDate::kValueOffset));
   1258   } else {
   1259     if (field_index < JSDate::kFirstUncachedField) {
   1260       Label stamp_mismatch;
   1261       __ Load(rdx, ExternalReference::date_cache_stamp(masm->isolate()));
   1262       __ cmpp(rdx, FieldOperand(rax, JSDate::kCacheStampOffset));
   1263       __ j(not_equal, &stamp_mismatch, Label::kNear);
   1264       __ movp(rax, FieldOperand(
   1265                        rax, JSDate::kValueOffset + field_index * kPointerSize));
   1266       __ ret(1 * kPointerSize);
   1267       __ bind(&stamp_mismatch);
   1268     }
   1269     FrameScope scope(masm, StackFrame::INTERNAL);
   1270     __ PrepareCallCFunction(2);
   1271     __ Move(arg_reg_1, rax);
   1272     __ Move(arg_reg_2, Smi::FromInt(field_index));
   1273     __ CallCFunction(
   1274         ExternalReference::get_date_field_function(masm->isolate()), 2);
   1275   }
   1276   __ ret(1 * kPointerSize);
   1277 
   1278   // 3. Raise a TypeError if the receiver is not a date.
   1279   __ bind(&receiver_not_date);
   1280   {
   1281     FrameScope scope(masm, StackFrame::MANUAL);
   1282     __ Push(rbp);
   1283     __ Move(rbp, rsp);
   1284     __ Push(rsi);
   1285     __ Push(rdi);
   1286     __ Push(Immediate(0));
   1287     __ CallRuntime(Runtime::kThrowNotDateError);
   1288   }
   1289 }
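// A rough sketch of the fast path above in plain C++ (the names here are
// illustrative only, not the actual runtime API):
//
//   if (date->cache_stamp == isolate->date_cache_stamp) {
//     return date->cached_fields[field_index];  // stamp matches: cache valid
//   }
//   return GetDateField(date, field_index);     // C call on stamp mismatch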
   1290 
   1291 // static
   1292 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   1293   // ----------- S t a t e -------------
   1294   //  -- rax     : argc
   1295   //  -- rsp[0]  : return address
   1296   //  -- rsp[8]  : argArray
   1297   //  -- rsp[16] : thisArg
   1298   //  -- rsp[24] : receiver
   1299   // -----------------------------------
   1300 
   1301   // 1. Load receiver into rdi, argArray into rax (if present), remove all
   1302   // arguments from the stack (including the receiver), and push thisArg (if
   1303   // present) instead.
   1304   {
   1305     Label no_arg_array, no_this_arg;
   1306     StackArgumentsAccessor args(rsp, rax);
   1307     __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
   1308     __ movp(rbx, rdx);
   1309     __ movp(rdi, args.GetReceiverOperand());
   1310     __ testp(rax, rax);
   1311     __ j(zero, &no_this_arg, Label::kNear);
   1312     {
   1313       __ movp(rdx, args.GetArgumentOperand(1));
   1314       __ cmpp(rax, Immediate(1));
   1315       __ j(equal, &no_arg_array, Label::kNear);
   1316       __ movp(rbx, args.GetArgumentOperand(2));
   1317       __ bind(&no_arg_array);
   1318     }
   1319     __ bind(&no_this_arg);
   1320     __ PopReturnAddressTo(rcx);
   1321     __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
   1322     __ Push(rdx);
   1323     __ PushReturnAddressFrom(rcx);
   1324     __ movp(rax, rbx);
   1325   }
   1326 
   1327   // ----------- S t a t e -------------
   1328   //  -- rax     : argArray
   1329   //  -- rdi     : receiver
   1330   //  -- rsp[0]  : return address
   1331   //  -- rsp[8]  : thisArg
   1332   // -----------------------------------
   1333 
   1334   // 2. Make sure the receiver is actually callable.
   1335   Label receiver_not_callable;
   1336   __ JumpIfSmi(rdi, &receiver_not_callable, Label::kNear);
   1337   __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
   1338   __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
   1339            Immediate(1 << Map::kIsCallable));
   1340   __ j(zero, &receiver_not_callable, Label::kNear);
   1341 
   1342   // 3. Tail call with no arguments if argArray is null or undefined.
   1343   Label no_arguments;
   1344   __ JumpIfRoot(rax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
   1345   __ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &no_arguments,
   1346                 Label::kNear);
   1347 
   1348   // 4a. Apply the receiver to the given argArray (passing undefined for
   1349   // new.target).
   1350   __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
   1351   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   1352 
   1353   // 4b. The argArray is either null or undefined, so we tail call without any
   1354   // arguments to the receiver. Since we did not create a frame for
   1355   // Function.prototype.apply() yet, we use a normal Call builtin here.
   1356   __ bind(&no_arguments);
   1357   {
   1358     __ Set(rax, 0);
   1359     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1360   }
   1361 
   1362   // 4c. The receiver is not callable, throw an appropriate TypeError.
   1363   __ bind(&receiver_not_callable);
   1364   {
   1365     StackArgumentsAccessor args(rsp, 0);
   1366     __ movp(args.GetReceiverOperand(), rdi);
   1367     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
   1368   }
   1369 }
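// In effect, the builtin above implements Function.prototype.apply roughly as
// the following pseudocode (a sketch, not the authoritative definition):
//
//   if (!IsCallable(receiver)) throw TypeError;
//   if (argArray == null || argArray == undefined)
//     return Call(receiver, thisArg);            // 4b
//   return Apply(receiver, thisArg, argArray);   // 4a, new.target undefined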
   1370 
   1371 
   1372 // static
   1373 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
   1374   // Stack Layout:
   1375   // rsp[0]           : Return address
   1376   // rsp[8]           : Argument n
   1377   // rsp[16]          : Argument n-1
   1378   //  ...
   1379   // rsp[8 * n]       : Argument 1
   1380   // rsp[8 * (n + 1)] : Receiver (callable to call)
   1381   //
   1382   // rax contains the number of arguments, n, not counting the receiver.
   1383   //
   1384   // 1. Make sure we have at least one argument.
   1385   {
   1386     Label done;
   1387     __ testp(rax, rax);
   1388     __ j(not_zero, &done, Label::kNear);
   1389     __ PopReturnAddressTo(rbx);
   1390     __ PushRoot(Heap::kUndefinedValueRootIndex);
   1391     __ PushReturnAddressFrom(rbx);
   1392     __ incp(rax);
   1393     __ bind(&done);
   1394   }
   1395 
   1396   // 2. Get the callable to call (passed as receiver) from the stack.
   1397   {
   1398     StackArgumentsAccessor args(rsp, rax);
   1399     __ movp(rdi, args.GetReceiverOperand());
   1400   }
   1401 
   1402   // 3. Shift arguments and return address one slot down on the stack
   1403   //    (overwriting the original receiver).  Adjust argument count to make
   1404   //    the original first argument the new receiver.
   1405   {
   1406     Label loop;
   1407     __ movp(rcx, rax);
   1408     StackArgumentsAccessor args(rsp, rcx);
   1409     __ bind(&loop);
   1410     __ movp(rbx, args.GetArgumentOperand(1));
   1411     __ movp(args.GetArgumentOperand(0), rbx);
   1412     __ decp(rcx);
   1413     __ j(not_zero, &loop);              // While non-zero.
   1414     __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
   1415     __ decp(rax);  // One fewer argument (first argument is new receiver).
   1416   }
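  // For example, for f.call(a, b) (rax == 2) the stack goes from
  //   [ret, b, a, f]   (f being the receiver, i.e. the callable)
  // to
  //   [ret, b, a]      with rax == 1,
  // so that a becomes the new receiver and b the sole argument.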
   1417 
   1418   // 4. Call the callable.
   1419   // Since we did not create a frame for Function.prototype.call() yet,
   1420   // we use a normal Call builtin here.
   1421   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1422 }
   1423 
   1424 
   1425 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   1426   // ----------- S t a t e -------------
   1427   //  -- rax     : argc
   1428   //  -- rsp[0]  : return address
   1429   //  -- rsp[8]  : argumentsList
   1430   //  -- rsp[16] : thisArgument
   1431   //  -- rsp[24] : target
   1432   //  -- rsp[32] : receiver
   1433   // -----------------------------------
   1434 
   1435   // 1. Load target into rdi (if present), argumentsList into rax (if present),
   1436   // remove all arguments from the stack (including the receiver), and push
   1437   // thisArgument (if present) instead.
   1438   {
   1439     Label done;
   1440     StackArgumentsAccessor args(rsp, rax);
   1441     __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
   1442     __ movp(rdx, rdi);
   1443     __ movp(rbx, rdi);
   1444     __ cmpp(rax, Immediate(1));
   1445     __ j(below, &done, Label::kNear);
   1446     __ movp(rdi, args.GetArgumentOperand(1));  // target
   1447     __ j(equal, &done, Label::kNear);
   1448     __ movp(rdx, args.GetArgumentOperand(2));  // thisArgument
   1449     __ cmpp(rax, Immediate(3));
   1450     __ j(below, &done, Label::kNear);
   1451     __ movp(rbx, args.GetArgumentOperand(3));  // argumentsList
   1452     __ bind(&done);
   1453     __ PopReturnAddressTo(rcx);
   1454     __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
   1455     __ Push(rdx);
   1456     __ PushReturnAddressFrom(rcx);
   1457     __ movp(rax, rbx);
   1458   }
   1459 
   1460   // ----------- S t a t e -------------
   1461   //  -- rax     : argumentsList
   1462   //  -- rdi     : target
   1463   //  -- rsp[0]  : return address
   1464   //  -- rsp[8]  : thisArgument
   1465   // -----------------------------------
   1466 
   1467   // 2. Make sure the target is actually callable.
   1468   Label target_not_callable;
   1469   __ JumpIfSmi(rdi, &target_not_callable, Label::kNear);
   1470   __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
   1471   __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
   1472            Immediate(1 << Map::kIsCallable));
   1473   __ j(zero, &target_not_callable, Label::kNear);
   1474 
   1475   // 3a. Apply the target to the given argumentsList (passing undefined for
   1476   // new.target).
   1477   __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
   1478   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   1479 
   1480   // 3b. The target is not callable, throw an appropriate TypeError.
   1481   __ bind(&target_not_callable);
   1482   {
   1483     StackArgumentsAccessor args(rsp, 0);
   1484     __ movp(args.GetReceiverOperand(), rdi);
   1485     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
   1486   }
   1487 }
   1488 
   1489 
   1490 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   1491   // ----------- S t a t e -------------
   1492   //  -- rax     : argc
   1493   //  -- rsp[0]  : return address
   1494   //  -- rsp[8]  : new.target (optional)
   1495   //  -- rsp[16] : argumentsList
   1496   //  -- rsp[24] : target
   1497   //  -- rsp[32] : receiver
   1498   // -----------------------------------
   1499 
   1500   // 1. Load target into rdi (if present), argumentsList into rax (if present),
   1501   // new.target into rdx (if present, otherwise use target), remove all
    1502   // arguments from the stack (including the receiver), and push undefined as
    1503   // the receiver instead (Reflect.construct takes no thisArgument).
   1504   {
   1505     Label done;
   1506     StackArgumentsAccessor args(rsp, rax);
   1507     __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
   1508     __ movp(rdx, rdi);
   1509     __ movp(rbx, rdi);
   1510     __ cmpp(rax, Immediate(1));
   1511     __ j(below, &done, Label::kNear);
   1512     __ movp(rdi, args.GetArgumentOperand(1));  // target
   1513     __ movp(rdx, rdi);                         // new.target defaults to target
   1514     __ j(equal, &done, Label::kNear);
   1515     __ movp(rbx, args.GetArgumentOperand(2));  // argumentsList
   1516     __ cmpp(rax, Immediate(3));
   1517     __ j(below, &done, Label::kNear);
   1518     __ movp(rdx, args.GetArgumentOperand(3));  // new.target
   1519     __ bind(&done);
   1520     __ PopReturnAddressTo(rcx);
   1521     __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
   1522     __ PushRoot(Heap::kUndefinedValueRootIndex);
   1523     __ PushReturnAddressFrom(rcx);
   1524     __ movp(rax, rbx);
   1525   }
   1526 
   1527   // ----------- S t a t e -------------
   1528   //  -- rax     : argumentsList
   1529   //  -- rdx     : new.target
   1530   //  -- rdi     : target
   1531   //  -- rsp[0]  : return address
   1532   //  -- rsp[8]  : receiver (undefined)
   1533   // -----------------------------------
   1534 
   1535   // 2. Make sure the target is actually a constructor.
   1536   Label target_not_constructor;
   1537   __ JumpIfSmi(rdi, &target_not_constructor, Label::kNear);
   1538   __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
   1539   __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
   1540            Immediate(1 << Map::kIsConstructor));
   1541   __ j(zero, &target_not_constructor, Label::kNear);
   1542 
    1543   // 3. Make sure the new.target is actually a constructor.
   1544   Label new_target_not_constructor;
   1545   __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
   1546   __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
   1547   __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
   1548            Immediate(1 << Map::kIsConstructor));
   1549   __ j(zero, &new_target_not_constructor, Label::kNear);
   1550 
   1551   // 4a. Construct the target with the given new.target and argumentsList.
   1552   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   1553 
   1554   // 4b. The target is not a constructor, throw an appropriate TypeError.
   1555   __ bind(&target_not_constructor);
   1556   {
   1557     StackArgumentsAccessor args(rsp, 0);
   1558     __ movp(args.GetReceiverOperand(), rdi);
   1559     __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
   1560   }
   1561 
   1562   // 4c. The new.target is not a constructor, throw an appropriate TypeError.
   1563   __ bind(&new_target_not_constructor);
   1564   {
   1565     StackArgumentsAccessor args(rsp, 0);
   1566     __ movp(args.GetReceiverOperand(), rdx);
   1567     __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
   1568   }
   1569 }
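// Taken together, the above implements Reflect.construct roughly as follows
// (a sketch; newTarget defaults to target when absent):
//
//   if (!IsConstructor(target)) throw TypeError;     // 4b
//   if (!IsConstructor(newTarget)) throw TypeError;  // 4c
//   return Construct(target, argumentsList, newTarget);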
   1570 
   1571 
   1572 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
   1573   // ----------- S t a t e -------------
   1574   //  -- rax    : argc
   1575   //  -- rsp[0] : return address
   1576   //  -- rsp[8] : last argument
   1577   // -----------------------------------
   1578   Label generic_array_code;
   1579 
   1580   // Get the InternalArray function.
   1581   __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
   1582 
   1583   if (FLAG_debug_code) {
    1584     // The initial map for the builtin InternalArray function should be a map.
   1585     __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    1586     // A NULL pointer looks like a Smi, so this check catches both.
   1587     STATIC_ASSERT(kSmiTag == 0);
   1588     Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
   1589     __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
   1590     __ CmpObjectType(rbx, MAP_TYPE, rcx);
   1591     __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
   1592   }
   1593 
   1594   // Run the native code for the InternalArray function called as a normal
   1595   // function.
    1596   // Tail call a stub.
   1597   InternalArrayConstructorStub stub(masm->isolate());
   1598   __ TailCallStub(&stub);
   1599 }
   1600 
   1601 
   1602 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
   1603   // ----------- S t a t e -------------
   1604   //  -- rax    : argc
   1605   //  -- rsp[0] : return address
   1606   //  -- rsp[8] : last argument
   1607   // -----------------------------------
   1608   Label generic_array_code;
   1609 
   1610   // Get the Array function.
   1611   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rdi);
   1612 
   1613   if (FLAG_debug_code) {
    1614     // The initial map for the builtin Array function should be a map.
   1615     __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    1616     // A NULL pointer looks like a Smi, so this check catches both.
   1617     STATIC_ASSERT(kSmiTag == 0);
   1618     Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
   1619     __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
   1620     __ CmpObjectType(rbx, MAP_TYPE, rcx);
   1621     __ Check(equal, kUnexpectedInitialMapForArrayFunction);
   1622   }
   1623 
   1624   __ movp(rdx, rdi);
   1625   // Run the native code for the Array function called as a normal function.
    1626   // Tail call a stub.
   1627   __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
   1628   ArrayConstructorStub stub(masm->isolate());
   1629   __ TailCallStub(&stub);
   1630 }
   1631 
   1632 
   1633 // static
   1634 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
   1635   // ----------- S t a t e -------------
   1636   //  -- rax                 : number of arguments
   1637   //  -- rdi                 : function
   1638   //  -- rsi                 : context
   1639   //  -- rsp[0]              : return address
   1640   //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
   1641   //  -- rsp[(argc + 1) * 8] : receiver
   1642   // -----------------------------------
   1643   Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above;
   1644   Heap::RootListIndex const root_index =
   1645       (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
   1646                                      : Heap::kMinusInfinityValueRootIndex;
   1647   XMMRegister const reg = (kind == MathMaxMinKind::kMin) ? xmm1 : xmm0;
   1648 
   1649   // Load the accumulator with the default return value (either -Infinity or
   1650   // +Infinity), with the tagged value in rdx and the double value in xmm0.
   1651   __ LoadRoot(rdx, root_index);
   1652   __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
   1653   __ Move(rcx, rax);
   1654 
   1655   Label done_loop, loop;
   1656   __ bind(&loop);
   1657   {
    1658     // Check if all parameters are done.
   1659     __ testp(rcx, rcx);
   1660     __ j(zero, &done_loop);
   1661 
   1662     // Load the next parameter tagged value into rbx.
   1663     __ movp(rbx, Operand(rsp, rcx, times_pointer_size, 0));
   1664 
    1665     // Load the double value of the parameter into xmm1, converting the
    1666     // parameter to a number first via the ToNumber builtin if necessary.
   1667     Label convert, convert_smi, convert_number, done_convert;
   1668     __ bind(&convert);
   1669     __ JumpIfSmi(rbx, &convert_smi);
   1670     __ JumpIfRoot(FieldOperand(rbx, HeapObject::kMapOffset),
   1671                   Heap::kHeapNumberMapRootIndex, &convert_number);
   1672     {
   1673       // Parameter is not a Number, use the ToNumber builtin to convert it.
   1674       FrameScope scope(masm, StackFrame::MANUAL);
   1675       __ Push(rbp);
   1676       __ Move(rbp, rsp);
   1677       __ Push(rsi);
   1678       __ Push(rdi);
   1679       __ Integer32ToSmi(rax, rax);
   1680       __ Integer32ToSmi(rcx, rcx);
   1681       __ Push(rax);
   1682       __ Push(rcx);
   1683       __ Push(rdx);
   1684       __ movp(rax, rbx);
   1685       __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
   1686       __ movp(rbx, rax);
   1687       __ Pop(rdx);
   1688       __ Pop(rcx);
   1689       __ Pop(rax);
   1690       __ Pop(rdi);
   1691       __ Pop(rsi);
   1692       {
   1693         // Restore the double accumulator value (xmm0).
   1694         Label restore_smi, done_restore;
   1695         __ JumpIfSmi(rdx, &restore_smi, Label::kNear);
   1696         __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
   1697         __ jmp(&done_restore, Label::kNear);
   1698         __ bind(&restore_smi);
   1699         __ SmiToDouble(xmm0, rdx);
   1700         __ bind(&done_restore);
   1701       }
   1702       __ SmiToInteger32(rcx, rcx);
   1703       __ SmiToInteger32(rax, rax);
   1704       __ leave();
   1705     }
   1706     __ jmp(&convert);
   1707     __ bind(&convert_number);
   1708     __ Movsd(xmm1, FieldOperand(rbx, HeapNumber::kValueOffset));
   1709     __ jmp(&done_convert, Label::kNear);
   1710     __ bind(&convert_smi);
   1711     __ SmiToDouble(xmm1, rbx);
   1712     __ bind(&done_convert);
   1713 
   1714     // Perform the actual comparison with the accumulator value on the left hand
   1715     // side (xmm0) and the next parameter value on the right hand side (xmm1).
   1716     Label compare_equal, compare_nan, compare_swap, done_compare;
   1717     __ Ucomisd(xmm0, xmm1);
   1718     __ j(parity_even, &compare_nan, Label::kNear);
   1719     __ j(cc, &done_compare, Label::kNear);
   1720     __ j(equal, &compare_equal, Label::kNear);
   1721 
   1722     // Result is on the right hand side.
   1723     __ bind(&compare_swap);
   1724     __ Movaps(xmm0, xmm1);
   1725     __ Move(rdx, rbx);
   1726     __ jmp(&done_compare, Label::kNear);
   1727 
   1728     // At least one side is NaN, which means that the result will be NaN too.
   1729     __ bind(&compare_nan);
   1730     __ LoadRoot(rdx, Heap::kNanValueRootIndex);
   1731     __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
   1732     __ jmp(&done_compare, Label::kNear);
   1733 
   1734     // Left and right hand side are equal, check for -0 vs. +0.
   1735     __ bind(&compare_equal);
   1736     __ Movmskpd(kScratchRegister, reg);
   1737     __ testl(kScratchRegister, Immediate(1));
   1738     __ j(not_zero, &compare_swap);
   1739 
   1740     __ bind(&done_compare);
   1741     __ decp(rcx);
   1742     __ jmp(&loop);
   1743   }
   1744 
   1745   __ bind(&done_loop);
   1746   __ PopReturnAddressTo(rcx);
   1747   __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
   1748   __ PushReturnAddressFrom(rcx);
   1749   __ movp(rax, rdx);
   1750   __ Ret();
   1751 }
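// Note on the -0/+0 tie-break above: when Ucomisd reports equality, Movmskpd
// extracts the sign bit of `reg` (xmm1 for kMin, xmm0 for kMax) and a set bit
// forces a swap. Hence Math.min(+0, -0) yields -0 and Math.max(-0, +0)
// yields +0, as the spec requires.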
   1752 
   1753 // static
   1754 void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
   1755   // ----------- S t a t e -------------
   1756   //  -- rax                 : number of arguments
   1757   //  -- rdi                 : constructor function
   1758   //  -- rsp[0]              : return address
   1759   //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
   1760   //  -- rsp[(argc + 1) * 8] : receiver
   1761   // -----------------------------------
   1762 
   1763   // 1. Load the first argument into rax and get rid of the rest (including the
   1764   // receiver).
   1765   Label no_arguments;
   1766   {
   1767     StackArgumentsAccessor args(rsp, rax);
   1768     __ testp(rax, rax);
   1769     __ j(zero, &no_arguments, Label::kNear);
   1770     __ movp(rbx, args.GetArgumentOperand(1));
   1771     __ PopReturnAddressTo(rcx);
   1772     __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
   1773     __ PushReturnAddressFrom(rcx);
   1774     __ movp(rax, rbx);
   1775   }
   1776 
   1777   // 2a. Convert the first argument to a number.
   1778   __ Jump(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
   1779 
   1780   // 2b. No arguments, return +0 (already in rax).
   1781   __ bind(&no_arguments);
   1782   __ ret(1 * kPointerSize);
   1783 }
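// Called as a function (without new), Number therefore behaves as
//   Number()  -> +0            (rax is 0, which is already the Smi zero)
//   Number(x) -> ToNumber(x)   (any extra arguments are dropped)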
   1784 
   1785 
   1786 // static
   1787 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
   1788   // ----------- S t a t e -------------
   1789   //  -- rax                 : number of arguments
   1790   //  -- rdi                 : constructor function
   1791   //  -- rdx                 : new target
   1792   //  -- rsp[0]              : return address
   1793   //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
   1794   //  -- rsp[(argc + 1) * 8] : receiver
   1795   // -----------------------------------
   1796 
   1797   // 1. Make sure we operate in the context of the called function.
   1798   __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
   1799 
   1800   // 2. Load the first argument into rbx and get rid of the rest (including the
   1801   // receiver).
   1802   {
   1803     StackArgumentsAccessor args(rsp, rax);
   1804     Label no_arguments, done;
   1805     __ testp(rax, rax);
   1806     __ j(zero, &no_arguments, Label::kNear);
   1807     __ movp(rbx, args.GetArgumentOperand(1));
   1808     __ jmp(&done, Label::kNear);
   1809     __ bind(&no_arguments);
   1810     __ Move(rbx, Smi::FromInt(0));
   1811     __ bind(&done);
   1812     __ PopReturnAddressTo(rcx);
   1813     __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
   1814     __ PushReturnAddressFrom(rcx);
   1815   }
   1816 
   1817   // 3. Make sure rbx is a number.
   1818   {
   1819     Label done_convert;
   1820     __ JumpIfSmi(rbx, &done_convert);
   1821     __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
   1822                    Heap::kHeapNumberMapRootIndex);
   1823     __ j(equal, &done_convert);
   1824     {
   1825       FrameScope scope(masm, StackFrame::INTERNAL);
   1826       __ Push(rdx);
   1827       __ Push(rdi);
   1828       __ Move(rax, rbx);
   1829       __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
   1830       __ Move(rbx, rax);
   1831       __ Pop(rdi);
   1832       __ Pop(rdx);
   1833     }
   1834     __ bind(&done_convert);
   1835   }
   1836 
   1837   // 4. Check if new target and constructor differ.
   1838   Label new_object;
   1839   __ cmpp(rdx, rdi);
   1840   __ j(not_equal, &new_object);
   1841 
   1842   // 5. Allocate a JSValue wrapper for the number.
   1843   __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
   1844   __ Ret();
   1845 
   1846   // 6. Fallback to the runtime to create new object.
   1847   __ bind(&new_object);
   1848   {
   1849     FrameScope scope(masm, StackFrame::INTERNAL);
   1850     __ Push(rbx);  // the first argument
   1851     FastNewObjectStub stub(masm->isolate());
   1852     __ CallStub(&stub);
   1853     __ Pop(FieldOperand(rax, JSValue::kValueOffset));
   1854   }
   1855   __ Ret();
   1856 }
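// With `new`, the converted number is boxed instead: it becomes the value of
// a freshly allocated JSValue wrapper (or, when new.target differs from the
// constructor, of an object created through FastNewObjectStub).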
   1857 
   1858 
   1859 // static
   1860 void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
   1861   // ----------- S t a t e -------------
   1862   //  -- rax                 : number of arguments
   1863   //  -- rdi                 : constructor function
   1864   //  -- rsp[0]              : return address
   1865   //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
   1866   //  -- rsp[(argc + 1) * 8] : receiver
   1867   // -----------------------------------
   1868 
   1869   // 1. Load the first argument into rax and get rid of the rest (including the
   1870   // receiver).
   1871   Label no_arguments;
   1872   {
   1873     StackArgumentsAccessor args(rsp, rax);
   1874     __ testp(rax, rax);
   1875     __ j(zero, &no_arguments, Label::kNear);
   1876     __ movp(rbx, args.GetArgumentOperand(1));
   1877     __ PopReturnAddressTo(rcx);
   1878     __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
   1879     __ PushReturnAddressFrom(rcx);
   1880     __ movp(rax, rbx);
   1881   }
   1882 
   1883   // 2a. At least one argument, return rax if it's a string, otherwise
   1884   // dispatch to appropriate conversion.
   1885   Label to_string, symbol_descriptive_string;
   1886   {
   1887     __ JumpIfSmi(rax, &to_string, Label::kNear);
   1888     STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
   1889     __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
   1890     __ j(above, &to_string, Label::kNear);
   1891     __ j(equal, &symbol_descriptive_string, Label::kNear);
   1892     __ Ret();
   1893   }
   1894 
   1895   // 2b. No arguments, return the empty string (and pop the receiver).
   1896   __ bind(&no_arguments);
   1897   {
   1898     __ LoadRoot(rax, Heap::kempty_stringRootIndex);
   1899     __ ret(1 * kPointerSize);
   1900   }
   1901 
   1902   // 3a. Convert rax to a string.
   1903   __ bind(&to_string);
   1904   {
   1905     ToStringStub stub(masm->isolate());
   1906     __ TailCallStub(&stub);
   1907   }
   1908 
   1909   // 3b. Convert symbol in rax to a string.
   1910   __ bind(&symbol_descriptive_string);
   1911   {
   1912     __ PopReturnAddressTo(rcx);
   1913     __ Push(rax);
   1914     __ PushReturnAddressFrom(rcx);
   1915     __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
   1916   }
   1917 }
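// Called as a function, String therefore returns "" for no arguments, the
// argument itself if it is already a string, a descriptive string (e.g.
// "Symbol(foo)") for symbols, and ToString(argument) for everything else.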
   1918 
   1919 
   1920 // static
   1921 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
   1922   // ----------- S t a t e -------------
   1923   //  -- rax                 : number of arguments
   1924   //  -- rdi                 : constructor function
   1925   //  -- rdx                 : new target
   1926   //  -- rsp[0]              : return address
   1927   //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
   1928   //  -- rsp[(argc + 1) * 8] : receiver
   1929   // -----------------------------------
   1930 
   1931   // 1. Make sure we operate in the context of the called function.
   1932   __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
   1933 
   1934   // 2. Load the first argument into rbx and get rid of the rest (including the
   1935   // receiver).
   1936   {
   1937     StackArgumentsAccessor args(rsp, rax);
   1938     Label no_arguments, done;
   1939     __ testp(rax, rax);
   1940     __ j(zero, &no_arguments, Label::kNear);
   1941     __ movp(rbx, args.GetArgumentOperand(1));
   1942     __ jmp(&done, Label::kNear);
   1943     __ bind(&no_arguments);
   1944     __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
   1945     __ bind(&done);
   1946     __ PopReturnAddressTo(rcx);
   1947     __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
   1948     __ PushReturnAddressFrom(rcx);
   1949   }
   1950 
   1951   // 3. Make sure rbx is a string.
   1952   {
   1953     Label convert, done_convert;
   1954     __ JumpIfSmi(rbx, &convert, Label::kNear);
   1955     __ CmpObjectType(rbx, FIRST_NONSTRING_TYPE, rcx);
   1956     __ j(below, &done_convert);
   1957     __ bind(&convert);
   1958     {
   1959       FrameScope scope(masm, StackFrame::INTERNAL);
   1960       ToStringStub stub(masm->isolate());
   1961       __ Push(rdx);
   1962       __ Push(rdi);
   1963       __ Move(rax, rbx);
   1964       __ CallStub(&stub);
   1965       __ Move(rbx, rax);
   1966       __ Pop(rdi);
   1967       __ Pop(rdx);
   1968     }
   1969     __ bind(&done_convert);
   1970   }
   1971 
   1972   // 4. Check if new target and constructor differ.
   1973   Label new_object;
   1974   __ cmpp(rdx, rdi);
   1975   __ j(not_equal, &new_object);
   1976 
   1977   // 5. Allocate a JSValue wrapper for the string.
   1978   __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
   1979   __ Ret();
   1980 
   1981   // 6. Fallback to the runtime to create new object.
   1982   __ bind(&new_object);
   1983   {
   1984     FrameScope scope(masm, StackFrame::INTERNAL);
   1985     __ Push(rbx);  // the first argument
   1986     FastNewObjectStub stub(masm->isolate());
   1987     __ CallStub(&stub);
   1988     __ Pop(FieldOperand(rax, JSValue::kValueOffset));
   1989   }
   1990   __ Ret();
   1991 }
   1992 
   1993 
   1994 static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
   1995                                        Label* stack_overflow) {
   1996   // ----------- S t a t e -------------
   1997   //  -- rax : actual number of arguments
   1998   //  -- rbx : expected number of arguments
   1999   //  -- rdx : new target (passed through to callee)
   2000   //  -- rdi : function (passed through to callee)
   2001   // -----------------------------------
   2002   // Check the stack for overflow. We are not trying to catch
   2003   // interruptions (e.g. debug break and preemption) here, so the "real stack
   2004   // limit" is checked.
   2005   Label okay;
   2006   __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
   2007   __ movp(rcx, rsp);
    2008   // Make rcx the space we have left. The stack might already have overflowed
    2009   // here, which would make rcx negative.
   2010   __ subp(rcx, r8);
   2011   // Make r8 the space we need for the array when it is unrolled onto the
   2012   // stack.
   2013   __ movp(r8, rbx);
   2014   __ shlp(r8, Immediate(kPointerSizeLog2));
   2015   // Check if the arguments will overflow the stack.
   2016   __ cmpp(rcx, r8);
   2017   __ j(less_equal, stack_overflow);  // Signed comparison.
   2018 }
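// A sketch of the check above in plain C++, assuming a downward-growing
// stack (illustration only):
//
//   intptr_t space_left = rsp - real_stack_limit;        // < 0 if overflowed
//   intptr_t space_needed = expected_argc << kPointerSizeLog2;
//   if (space_left <= space_needed) goto stack_overflow;  // signed compare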
   2019 
   2020 
   2021 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   2022   __ pushq(rbp);
   2023   __ movp(rbp, rsp);
   2024 
   2025   // Store the arguments adaptor context sentinel.
   2026   __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   2027 
   2028   // Push the function on the stack.
   2029   __ Push(rdi);
   2030 
   2031   // Preserve the number of arguments on the stack. Must preserve rax,
   2032   // rbx and rcx because these registers are used when copying the
   2033   // arguments and the receiver.
   2034   __ Integer32ToSmi(r8, rax);
   2035   __ Push(r8);
   2036 }
   2037 
   2038 
   2039 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
   2040   // Retrieve the number of arguments from the stack. Number is a Smi.
   2041   __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
   2042 
   2043   // Leave the frame.
   2044   __ movp(rsp, rbp);
   2045   __ popq(rbp);
   2046 
   2047   // Remove caller arguments from the stack.
   2048   __ PopReturnAddressTo(rcx);
   2049   SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
   2050   __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
   2051   __ PushReturnAddressFrom(rcx);
   2052 }
   2053 
   2054 // static
   2055 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
   2056   // ----------- S t a t e -------------
   2057   //  -- rdx    : requested object size (untagged)
   2058   //  -- rsp[0] : return address
   2059   // -----------------------------------
   2060   __ Integer32ToSmi(rdx, rdx);
   2061   __ PopReturnAddressTo(rcx);
   2062   __ Push(rdx);
   2063   __ PushReturnAddressFrom(rcx);
   2064   __ Move(rsi, Smi::FromInt(0));
   2065   __ TailCallRuntime(Runtime::kAllocateInNewSpace);
   2066 }
   2067 
   2068 // static
   2069 void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
   2070   // ----------- S t a t e -------------
   2071   //  -- rdx    : requested object size (untagged)
   2072   //  -- rsp[0] : return address
   2073   // -----------------------------------
   2074   __ Integer32ToSmi(rdx, rdx);
   2075   __ PopReturnAddressTo(rcx);
   2076   __ Push(rdx);
   2077   __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
   2078   __ PushReturnAddressFrom(rcx);
   2079   __ Move(rsi, Smi::FromInt(0));
   2080   __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
   2081 }
   2082 
   2083 void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
   2084   // The StringToNumber stub takes one argument in rax.
   2085   __ AssertString(rax);
   2086 
   2087   // Check if string has a cached array index.
   2088   Label runtime;
   2089   __ testl(FieldOperand(rax, String::kHashFieldOffset),
   2090            Immediate(String::kContainsCachedArrayIndexMask));
   2091   __ j(not_zero, &runtime, Label::kNear);
   2092   __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
   2093   __ IndexFromHash(rax, rax);
   2094   __ Ret();
   2095 
   2096   __ bind(&runtime);
   2097   {
   2098     FrameScope frame(masm, StackFrame::INTERNAL);
   2099     // Push argument.
   2100     __ Push(rax);
   2101     // We cannot use a tail call here because this builtin can also be called
   2102     // from wasm.
   2103     __ CallRuntime(Runtime::kStringToNumber);
   2104   }
   2105   __ Ret();
   2106 }
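// Rationale for the fast path above: strings that denote valid array indices
// cache that index in their hash field, so a string like "42" can be
// converted via IndexFromHash without entering the runtime.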
   2107 
   2108 // static
   2109 void Builtins::Generate_ToNumber(MacroAssembler* masm) {
   2110   // The ToNumber stub takes one argument in rax.
   2111   Label not_smi;
   2112   __ JumpIfNotSmi(rax, &not_smi, Label::kNear);
   2113   __ Ret();
   2114   __ bind(&not_smi);
   2115 
   2116   Label not_heap_number;
   2117   __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
   2118                  Heap::kHeapNumberMapRootIndex);
   2119   __ j(not_equal, &not_heap_number, Label::kNear);
   2120   __ Ret();
   2121   __ bind(&not_heap_number);
   2122 
   2123   __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
   2124           RelocInfo::CODE_TARGET);
   2125 }
   2126 
   2127 // static
   2128 void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
   2129   // The NonNumberToNumber stub takes one argument in rax.
   2130   __ AssertNotNumber(rax);
   2131 
   2132   Label not_string;
   2133   __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
   2134   // rax: object
   2135   // rdi: object map
   2136   __ j(above_equal, &not_string, Label::kNear);
   2137   __ Jump(masm->isolate()->builtins()->StringToNumber(),
   2138           RelocInfo::CODE_TARGET);
   2139   __ bind(&not_string);
   2140 
   2141   Label not_oddball;
   2142   __ CmpInstanceType(rdi, ODDBALL_TYPE);
   2143   __ j(not_equal, &not_oddball, Label::kNear);
   2144   __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
   2145   __ Ret();
   2146   __ bind(&not_oddball);
   2147   {
   2148     FrameScope frame(masm, StackFrame::INTERNAL);
   2149     // Push argument.
   2150     __ Push(rax);
   2151     // We cannot use a tail call here because this builtin can also be called
   2152     // from wasm.
   2153     __ CallRuntime(Runtime::kToNumber);
   2154   }
   2155   __ Ret();
   2156 }
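// ToNumber dispatch chain, as implemented by the two builtins above: Smis and
// HeapNumbers are returned unchanged; strings go through StringToNumber
// (cached array index or runtime); oddballs (undefined, null, booleans) use
// their precomputed to_number field; everything else ends up in
// Runtime::kToNumber.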
   2157 
   2158 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   2159   // ----------- S t a t e -------------
   2160   //  -- rax : actual number of arguments
   2161   //  -- rbx : expected number of arguments
   2162   //  -- rdx : new target (passed through to callee)
   2163   //  -- rdi : function (passed through to callee)
   2164   // -----------------------------------
   2165 
   2166   Label invoke, dont_adapt_arguments, stack_overflow;
   2167   Counters* counters = masm->isolate()->counters();
   2168   __ IncrementCounter(counters->arguments_adaptors(), 1);
   2169 
   2170   Label enough, too_few;
   2171   __ cmpp(rax, rbx);
   2172   __ j(less, &too_few);
   2173   __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
   2174   __ j(equal, &dont_adapt_arguments);
   2175 
   2176   {  // Enough parameters: Actual >= expected.
   2177     __ bind(&enough);
   2178     EnterArgumentsAdaptorFrame(masm);
   2179     ArgumentsAdaptorStackCheck(masm, &stack_overflow);
   2180 
   2181     // Copy receiver and all expected arguments.
   2182     const int offset = StandardFrameConstants::kCallerSPOffset;
   2183     __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
   2184     __ Set(r8, -1);  // account for receiver
   2185 
   2186     Label copy;
   2187     __ bind(&copy);
   2188     __ incp(r8);
   2189     __ Push(Operand(rax, 0));
   2190     __ subp(rax, Immediate(kPointerSize));
   2191     __ cmpp(r8, rbx);
   2192     __ j(less, &copy);
   2193     __ jmp(&invoke);
   2194   }
   2195 
   2196   {  // Too few parameters: Actual < expected.
   2197     __ bind(&too_few);
   2198 
   2199     EnterArgumentsAdaptorFrame(masm);
   2200     ArgumentsAdaptorStackCheck(masm, &stack_overflow);
   2201 
   2202     // Copy receiver and all actual arguments.
   2203     const int offset = StandardFrameConstants::kCallerSPOffset;
   2204     __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
   2205     __ Set(r8, -1);  // account for receiver
   2206 
   2207     Label copy;
   2208     __ bind(&copy);
   2209     __ incp(r8);
   2210     __ Push(Operand(rdi, 0));
   2211     __ subp(rdi, Immediate(kPointerSize));
   2212     __ cmpp(r8, rax);
   2213     __ j(less, &copy);
   2214 
   2215     // Fill remaining expected arguments with undefined values.
   2216     Label fill;
   2217     __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
   2218     __ bind(&fill);
   2219     __ incp(r8);
   2220     __ Push(kScratchRegister);
   2221     __ cmpp(r8, rbx);
   2222     __ j(less, &fill);
   2223 
   2224     // Restore function pointer.
   2225     __ movp(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
   2226   }
   2227 
   2228   // Call the entry point.
   2229   __ bind(&invoke);
   2230   __ movp(rax, rbx);
   2231   // rax : expected number of arguments
   2232   // rdx : new target (passed through to callee)
   2233   // rdi : function (passed through to callee)
   2234   __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
   2235   __ call(rcx);
   2236 
   2237   // Store offset of return address for deoptimizer.
   2238   masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
   2239 
   2240   // Leave frame and return.
   2241   LeaveArgumentsAdaptorFrame(masm);
   2242   __ ret(0);
   2243 
   2244   // -------------------------------------------
    2245   // Don't adapt arguments.
   2246   // -------------------------------------------
   2247   __ bind(&dont_adapt_arguments);
   2248   __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
   2249   __ jmp(rcx);
   2250 
   2251   __ bind(&stack_overflow);
   2252   {
   2253     FrameScope frame(masm, StackFrame::MANUAL);
   2254     __ CallRuntime(Runtime::kThrowStackOverflow);
   2255     __ int3();
   2256   }
   2257 }
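// Worked example for the "too few" path above: calling a function whose
// formal parameter count is 3 as f(1) enters the adaptor with rax == 1 and
// rbx == 3. After copying, the new frame holds (top of stack first)
//   undefined, undefined, 1, receiver
// i.e. the single actual argument plus receiver, padded with two undefined
// values, before the callee is invoked with rax == 3.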
   2258 
   2259 
   2260 // static
   2261 void Builtins::Generate_Apply(MacroAssembler* masm) {
   2262   // ----------- S t a t e -------------
   2263   //  -- rax    : argumentsList
   2264   //  -- rdi    : target
   2265   //  -- rdx    : new.target (checked to be constructor or undefined)
   2266   //  -- rsp[0] : return address.
   2267   //  -- rsp[8] : thisArgument
   2268   // -----------------------------------
   2269 
   2270   // Create the list of arguments from the array-like argumentsList.
   2271   {
   2272     Label create_arguments, create_array, create_runtime, done_create;
   2273     __ JumpIfSmi(rax, &create_runtime);
   2274 
   2275     // Load the map of argumentsList into rcx.
   2276     __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
   2277 
   2278     // Load native context into rbx.
   2279     __ movp(rbx, NativeContextOperand());
   2280 
   2281     // Check if argumentsList is an (unmodified) arguments object.
   2282     __ cmpp(rcx, ContextOperand(rbx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
   2283     __ j(equal, &create_arguments);
   2284     __ cmpp(rcx, ContextOperand(rbx, Context::STRICT_ARGUMENTS_MAP_INDEX));
   2285     __ j(equal, &create_arguments);
   2286 
   2287     // Check if argumentsList is a fast JSArray.
   2288     __ CmpInstanceType(rcx, JS_ARRAY_TYPE);
   2289     __ j(equal, &create_array);
   2290 
   2291     // Ask the runtime to create the list (actually a FixedArray).
   2292     __ bind(&create_runtime);
   2293     {
   2294       FrameScope scope(masm, StackFrame::INTERNAL);
   2295       __ Push(rdi);
   2296       __ Push(rdx);
   2297       __ Push(rax);
   2298       __ CallRuntime(Runtime::kCreateListFromArrayLike);
   2299       __ Pop(rdx);
   2300       __ Pop(rdi);
   2301       __ SmiToInteger32(rbx, FieldOperand(rax, FixedArray::kLengthOffset));
   2302     }
   2303     __ jmp(&done_create);
   2304 
   2305     // Try to create the list from an arguments object.
   2306     __ bind(&create_arguments);
   2307     __ movp(rbx, FieldOperand(rax, JSArgumentsObject::kLengthOffset));
   2308     __ movp(rcx, FieldOperand(rax, JSObject::kElementsOffset));
   2309     __ cmpp(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
   2310     __ j(not_equal, &create_runtime);
   2311     __ SmiToInteger32(rbx, rbx);
   2312     __ movp(rax, rcx);
   2313     __ jmp(&done_create);
   2314 
   2315     // Try to create the list from a JSArray object.
   2316     __ bind(&create_array);
   2317     __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
   2318     __ DecodeField<Map::ElementsKindBits>(rcx);
   2319     STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
   2320     STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
   2321     STATIC_ASSERT(FAST_ELEMENTS == 2);
   2322     __ cmpl(rcx, Immediate(FAST_ELEMENTS));
   2323     __ j(above, &create_runtime);
   2324     __ cmpl(rcx, Immediate(FAST_HOLEY_SMI_ELEMENTS));
   2325     __ j(equal, &create_runtime);
   2326     __ SmiToInteger32(rbx, FieldOperand(rax, JSArray::kLengthOffset));
   2327     __ movp(rax, FieldOperand(rax, JSArray::kElementsOffset));
   2328 
   2329     __ bind(&done_create);
   2330   }
   2331 
   2332   // Check for stack overflow.
   2333   {
   2334     // Check the stack for overflow. We are not trying to catch interruptions
    2335     // (e.g. debug break and preemption) here, so check the "real stack limit".
   2336     Label done;
   2337     __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
   2338     __ movp(rcx, rsp);
    2339     // Make rcx the space we have left. The stack might already have
    2340     // overflowed here, which would make rcx negative.
   2341     __ subp(rcx, kScratchRegister);
   2342     __ sarp(rcx, Immediate(kPointerSizeLog2));
   2343     // Check if the arguments will overflow the stack.
   2344     __ cmpp(rcx, rbx);
   2345     __ j(greater, &done, Label::kNear);  // Signed comparison.
   2346     __ TailCallRuntime(Runtime::kThrowStackOverflow);
   2347     __ bind(&done);
   2348   }
   2349 
   2350   // ----------- S t a t e -------------
   2351   //  -- rdi    : target
   2352   //  -- rax    : args (a FixedArray built from argumentsList)
   2353   //  -- rbx    : len (number of elements to push from args)
   2354   //  -- rdx    : new.target (checked to be constructor or undefined)
   2355   //  -- rsp[0] : return address.
   2356   //  -- rsp[8] : thisArgument
   2357   // -----------------------------------
   2358 
   2359   // Push arguments onto the stack (thisArgument is already on the stack).
   2360   {
   2361     __ PopReturnAddressTo(r8);
   2362     __ Set(rcx, 0);
   2363     Label done, loop;
   2364     __ bind(&loop);
   2365     __ cmpl(rcx, rbx);
   2366     __ j(equal, &done, Label::kNear);
   2367     __ Push(
   2368         FieldOperand(rax, rcx, times_pointer_size, FixedArray::kHeaderSize));
   2369     __ incl(rcx);
   2370     __ jmp(&loop);
   2371     __ bind(&done);
   2372     __ PushReturnAddressFrom(r8);
   2373     __ Move(rax, rcx);
   2374   }
   2375 
   2376   // Dispatch to Call or Construct depending on whether new.target is undefined.
   2377   {
   2378     __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
   2379     __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   2380     __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
   2381   }
   2382 }
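// Summary: argumentsList is materialized as a FixedArray from one of three
// sources (an unmodified arguments object whose elements are reused, a fast
// packed JSArray whose elements are reused, or the generic
// Runtime::kCreateListFromArrayLike fallback) and is then unrolled onto the
// stack before dispatching to Call or Construct.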
   2383 
   2384 namespace {
   2385 
    2386 // Drops the top JavaScript frame and an arguments adaptor frame below it (if
    2387 // present), preserving all the arguments prepared for the current call.
    2388 // Does nothing unless ES2015 tail call elimination is enabled.
   2389 // ES6 14.6.3. PrepareForTailCall
   2390 //
   2391 // Stack structure for the function g() tail calling f():
   2392 //
   2393 // ------- Caller frame: -------
   2394 // |  ...
   2395 // |  g()'s arg M
   2396 // |  ...
   2397 // |  g()'s arg 1
   2398 // |  g()'s receiver arg
   2399 // |  g()'s caller pc
   2400 // ------- g()'s frame: -------
   2401 // |  g()'s caller fp      <- fp
   2402 // |  g()'s context
   2403 // |  function pointer: g
   2404 // |  -------------------------
   2405 // |  ...
   2406 // |  ...
   2407 // |  f()'s arg N
   2408 // |  ...
   2409 // |  f()'s arg 1
   2410 // |  f()'s receiver arg
   2411 // |  f()'s caller pc      <- sp
   2412 // ----------------------
   2413 //
   2414 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
   2415                         Register scratch1, Register scratch2,
   2416                         Register scratch3) {
   2417   DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
   2418   Comment cmnt(masm, "[ PrepareForTailCall");
   2419 
   2420   // Prepare for tail call only if ES2015 tail call elimination is active.
   2421   Label done;
   2422   ExternalReference is_tail_call_elimination_enabled =
   2423       ExternalReference::is_tail_call_elimination_enabled_address(
   2424           masm->isolate());
   2425   __ Move(kScratchRegister, is_tail_call_elimination_enabled);
   2426   __ cmpb(Operand(kScratchRegister, 0), Immediate(0));
   2427   __ j(equal, &done);
   2428 
   2429   // Drop possible interpreter handler/stub frame.
   2430   {
   2431     Label no_interpreter_frame;
   2432     __ Cmp(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset),
   2433            Smi::FromInt(StackFrame::STUB));
   2434     __ j(not_equal, &no_interpreter_frame, Label::kNear);
   2435     __ movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   2436     __ bind(&no_interpreter_frame);
   2437   }
   2438 
   2439   // Check if next frame is an arguments adaptor frame.
   2440   Register caller_args_count_reg = scratch1;
   2441   Label no_arguments_adaptor, formal_parameter_count_loaded;
   2442   __ movp(scratch2, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
   2443   __ Cmp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset),
   2444          Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   2445   __ j(not_equal, &no_arguments_adaptor, Label::kNear);
   2446 
   2447   // Drop current frame and load arguments count from arguments adaptor frame.
   2448   __ movp(rbp, scratch2);
   2449   __ SmiToInteger32(
   2450       caller_args_count_reg,
   2451       Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
   2452   __ jmp(&formal_parameter_count_loaded, Label::kNear);
   2453 
   2454   __ bind(&no_arguments_adaptor);
    2455   // Load the caller's formal parameter count.
   2456   __ movp(scratch1, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   2457   __ movp(scratch1,
   2458           FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
   2459   __ LoadSharedFunctionInfoSpecialField(
   2460       caller_args_count_reg, scratch1,
   2461       SharedFunctionInfo::kFormalParameterCountOffset);
   2462 
   2463   __ bind(&formal_parameter_count_loaded);
   2464 
   2465   ParameterCount callee_args_count(args_reg);
   2466   __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
   2467                         scratch3, ReturnAddressState::kOnStack);
   2468   __ bind(&done);
   2469 }
   2470 }  // namespace
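// PrepareForTailCall thus physically removes the caller's frame (and any
// arguments adaptor frame beneath it), sliding the freshly prepared arguments
// and the return address into the vacated slots so that the callee returns
// straight to the caller's caller.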
   2471 
   2472 // static
   2473 void Builtins::Generate_CallFunction(MacroAssembler* masm,
   2474                                      ConvertReceiverMode mode,
   2475                                      TailCallMode tail_call_mode) {
   2476   // ----------- S t a t e -------------
   2477   //  -- rax : the number of arguments (not including the receiver)
   2478   //  -- rdi : the function to call (checked to be a JSFunction)
   2479   // -----------------------------------
   2480   StackArgumentsAccessor args(rsp, rax);
   2481   __ AssertFunction(rdi);
   2482 
   2483   // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
   2484   // Check that the function is not a "classConstructor".
   2485   Label class_constructor;
   2486   __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
   2487   __ testb(FieldOperand(rdx, SharedFunctionInfo::kFunctionKindByteOffset),
   2488            Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
   2489   __ j(not_zero, &class_constructor);
   2490 
   2491   // ----------- S t a t e -------------
   2492   //  -- rax : the number of arguments (not including the receiver)
   2493   //  -- rdx : the shared function info.
   2494   //  -- rdi : the function to call (checked to be a JSFunction)
   2495   // -----------------------------------
   2496 
   2497   // Enter the context of the function; ToObject has to run in the function
   2498   // context, and we also need to take the global proxy from the function
   2499   // context in case of conversion.
   2500   STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
   2501                 SharedFunctionInfo::kStrictModeByteOffset);
   2502   __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
   2503   // We need to convert the receiver for non-native sloppy mode functions.
   2504   Label done_convert;
   2505   __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
   2506            Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
   2507                      (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
   2508   __ j(not_zero, &done_convert);
   2509   {
   2510     // ----------- S t a t e -------------
   2511     //  -- rax : the number of arguments (not including the receiver)
   2512     //  -- rdx : the shared function info.
   2513     //  -- rdi : the function to call (checked to be a JSFunction)
   2514     //  -- rsi : the function context.
   2515     // -----------------------------------
   2516 
   2517     if (mode == ConvertReceiverMode::kNullOrUndefined) {
   2518       // Patch receiver to global proxy.
   2519       __ LoadGlobalProxy(rcx);
   2520     } else {
   2521       Label convert_to_object, convert_receiver;
   2522       __ movp(rcx, args.GetReceiverOperand());
   2523       __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
   2524       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
   2525       __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
   2526       __ j(above_equal, &done_convert);
   2527       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
   2528         Label convert_global_proxy;
   2529         __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex,
   2530                       &convert_global_proxy, Label::kNear);
   2531         __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
   2532                          Label::kNear);
   2533         __ bind(&convert_global_proxy);
   2534         {
   2535           // Patch receiver to global proxy.
   2536           __ LoadGlobalProxy(rcx);
   2537         }
   2538         __ jmp(&convert_receiver);
   2539       }
   2540       __ bind(&convert_to_object);
   2541       {
   2542         // Convert receiver using ToObject.
   2543         // TODO(bmeurer): Inline the allocation here to avoid building the frame
   2544         // in the fast case? (fall back to AllocateInNewSpace?)
   2545         FrameScope scope(masm, StackFrame::INTERNAL);
   2546         __ Integer32ToSmi(rax, rax);
   2547         __ Push(rax);
   2548         __ Push(rdi);
   2549         __ movp(rax, rcx);
   2550         ToObjectStub stub(masm->isolate());
   2551         __ CallStub(&stub);
   2552         __ movp(rcx, rax);
   2553         __ Pop(rdi);
   2554         __ Pop(rax);
   2555         __ SmiToInteger32(rax, rax);
   2556       }
   2557       __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
   2558       __ bind(&convert_receiver);
   2559     }
   2560     __ movp(args.GetReceiverOperand(), rcx);
   2561   }
   2562   __ bind(&done_convert);
   2563 
   2564   // ----------- S t a t e -------------
   2565   //  -- rax : the number of arguments (not including the receiver)
   2566   //  -- rdx : the shared function info.
   2567   //  -- rdi : the function to call (checked to be a JSFunction)
   2568   //  -- rsi : the function context.
   2569   // -----------------------------------
   2570 
   2571   if (tail_call_mode == TailCallMode::kAllow) {
   2572     PrepareForTailCall(masm, rax, rbx, rcx, r8);
   2573   }
   2574 
   2575   __ LoadSharedFunctionInfoSpecialField(
   2576       rbx, rdx, SharedFunctionInfo::kFormalParameterCountOffset);
   2577   ParameterCount actual(rax);
   2578   ParameterCount expected(rbx);
   2579 
   2580   __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION,
   2581                         CheckDebugStepCallWrapper());
   2582 
   2583   // The function is a "classConstructor", need to raise an exception.
   2584   __ bind(&class_constructor);
   2585   {
   2586     FrameScope frame(masm, StackFrame::INTERNAL);
   2587     __ Push(rdi);
   2588     __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
   2589   }
   2590 }
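// Receiver conversion summary (cf. ES6 9.2.1.2 OrdinaryCallBindThis): strict
// mode and native functions take the receiver as-is; for sloppy-mode
// functions a null or undefined receiver is replaced by the global proxy and
// any other primitive receiver by ToObject(receiver).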
   2591 
   2592 
   2593 namespace {
   2594 
   2595 void Generate_PushBoundArguments(MacroAssembler* masm) {
   2596   // ----------- S t a t e -------------
   2597   //  -- rax : the number of arguments (not including the receiver)
   2598   //  -- rdx : new.target (only in case of [[Construct]])
   2599   //  -- rdi : target (checked to be a JSBoundFunction)
   2600   // -----------------------------------
   2601 
    2602   // Load [[BoundArguments]] into rcx and its length into rbx.
   2603   Label no_bound_arguments;
   2604   __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
   2605   __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
   2606   __ testl(rbx, rbx);
   2607   __ j(zero, &no_bound_arguments);
   2608   {
   2609     // ----------- S t a t e -------------
   2610     //  -- rax : the number of arguments (not including the receiver)
   2611     //  -- rdx : new.target (only in case of [[Construct]])
   2612     //  -- rdi : target (checked to be a JSBoundFunction)
   2613     //  -- rcx : the [[BoundArguments]] (implemented as FixedArray)
   2614     //  -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
   2615     // -----------------------------------
   2616 
   2617     // Reserve stack space for the [[BoundArguments]].
   2618     {
   2619       Label done;
   2620       __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0));
   2621       __ subp(rsp, kScratchRegister);
   2622       // Check the stack for overflow. We are not trying to catch interruptions
   2623       // (i.e. debug break and preemption) here, so check the "real stack
   2624       // limit".
   2625       __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
   2626       __ j(greater, &done, Label::kNear);  // Signed comparison.
   2627       // Restore the stack pointer.
   2628       __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
   2629       {
   2630         FrameScope scope(masm, StackFrame::MANUAL);
   2631         __ EnterFrame(StackFrame::INTERNAL);
   2632         __ CallRuntime(Runtime::kThrowStackOverflow);
   2633       }
   2634       __ bind(&done);
   2635     }
   2636 
   2637     // Adjust effective number of arguments to include return address.
   2638     __ incl(rax);
   2639 
   2640     // Relocate arguments and return address down the stack.
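    // The return address and the arguments move down; the receiver stays in
    // place, which opens a gap of rbx slots directly below it for the
    // [[BoundArguments]].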
    {
      Label loop;
      __ Set(rcx, 0);
      __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0));
      __ bind(&loop);
      __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
      __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister);
      __ incl(rcx);
      __ cmpl(rcx, rax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
      __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
      __ bind(&loop);
      __ decl(rbx);
      __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size,
                                             FixedArray::kHeaderSize));
      __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister);
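      // leal increments rax without touching EFLAGS, so the branch below
      // still acts on the flags set by the decl(rbx) above.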
      __ leal(rax, Operand(rax, 1));
      __ j(greater, &loop);
    }

    // Adjust the effective number of arguments: rax now holds the argument
    // count from the call, plus the return address, plus the number of
    // [[BoundArguments]], so subtract one again for the return address.
    __ decl(rax);
  }
  __ bind(&no_bound_arguments);
}

}  // namespace


// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(rdi);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, rax, rbx, rcx, r8);
  }

  // Patch the receiver to [[BoundThis]].
  StackArgumentsAccessor args(rsp, rax);
  __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
  __ movp(args.GetReceiverOperand(), rbx);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
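  // The builtin's code is stored as a Code object; the leap below adds
  // Code::kHeaderSize (minus the heap-object tag, via FieldOperand) to reach
  // its first instruction.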
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Load(rcx,
          ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}


// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the target to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(rdi, &non_callable);
  __ bind(&non_smi);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
       RelocInfo::CODE_TARGET);
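  // CmpObjectType left the map of rdi in rcx, so the instance-type and bit
  // field checks below can reuse it without reloading.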
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Call]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &non_callable);

  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ j(not_equal, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, rax, rbx, rcx, r8);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(rdi);
  __ PushReturnAddressFrom(kScratchRegister);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ addp(rax, Immediate(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else that is callable (the kIsCallable check above
  // passed) but is neither a JSFunction, a JSBoundFunction nor a JSProxy.
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ movp(args.GetReceiverOperand(), rdi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(rdi);

  // The calling convention for function-specific ConstructStubs requires
  // rbx to contain either an AllocationSite or undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}


// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(rdi);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
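  // (Per the [[Construct]] behavior of bound functions in ES2015: when
  // new.target is the bound function itself, it is replaced by the
  // [[BoundTargetFunction]].)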
  {
    Label done;
    __ cmpp(rdi, rdx);
    __ j(not_equal, &done, Label::kNear);
    __ movp(rdx,
            FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Load(rcx, ExternalReference(Builtins::kConstruct, masm->isolate()));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}


// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the constructor to call (checked to be a JSProxy)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(rdi);
  __ Push(rdx);
  __ PushReturnAddressFrom(kScratchRegister);
  // Include the pushed new_target, constructor and the receiver.
  __ addp(rax, Immediate(3));
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}


// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(rdi, &non_constructor, Label::kNear);

  // Dispatch based on instance type.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Construct]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &non_constructor, Label::kNear);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(),
       RelocInfo::CODE_TARGET);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ j(equal, masm->isolate()->builtins()->ConstructProxy(),
       RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ movp(args.GetReceiverOperand(), rdi);
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}


static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Register scratch0, Register scratch1,
                                    Register scratch2,
                                    Label* receiver_check_failed) {
  Register signature = scratch0;
  Register map = scratch1;
  Register constructor = scratch2;

  // If there is no signature, return the holder.
  __ movp(signature, FieldOperand(function_template_info,
                                  FunctionTemplateInfo::kSignatureOffset));
  __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
  Label receiver_check_passed;
  __ j(equal, &receiver_check_passed, Label::kNear);

  // Walk the prototype chain.
  __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, kScratchRegister);
  __ CmpInstanceType(kScratchRegister, JS_FUNCTION_TYPE);
  Label next_prototype;
  __ j(not_equal, &next_prototype, Label::kNear);

  // Get the constructor's signature.
  Register type = constructor;
  __ movp(type,
          FieldOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ movp(type, FieldOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmpp(signature, type);
  __ j(equal, &receiver_check_passed, Label::kNear);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype, Label::kNear);
  __ CmpObjectType(type, FUNCTION_TEMPLATE_INFO_TYPE, kScratchRegister);
  __ j(not_equal, &next_prototype, Label::kNear);

  // Otherwise load the parent function template and iterate.
  __ movp(type,
          FieldOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ jmp(&function_template_loop, Label::kNear);

  // Load the next prototype.
  __ bind(&next_prototype);
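  // Only hidden prototypes are walked; hitting an ordinary prototype boundary
  // means no template in the chain matched, so the check fails.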
  __ testq(FieldOperand(map, Map::kBitField3Offset),
           Immediate(Map::HasHiddenPrototype::kMask));
  __ j(zero, receiver_check_failed);
  __ movp(receiver, FieldOperand(map, Map::kPrototypeOffset));
  __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ jmp(&prototype_loop_start, Label::kNear);

  __ bind(&receiver_check_passed);
}


void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                : number of arguments (not including the receiver)
  //  -- rdi                : callee
  //  -- rsi                : context
  //  -- rsp[0]             : return address
  //  -- rsp[8]             : last argument
  //  -- ...
  //  -- rsp[rax * 8]       : first argument
  //  -- rsp[(rax + 1) * 8] : receiver
  // -----------------------------------

  StackArgumentsAccessor args(rsp, rax);

  // Load the FunctionTemplateInfo.
  __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ movp(rcx, args.GetReceiverOperand());
  CompatibleReceiverCheck(masm, rcx, rbx, rdx, r8, r9, &receiver_check_failed);

  // Get the fast API call handler from the FunctionTemplateInfo, and jump to
  // the beginning of its code.
  __ movp(rdx, FieldOperand(rbx, FunctionTemplateInfo::kCallCodeOffset));
  __ movp(rdx, FieldOperand(rdx, CallHandlerInfo::kFastHandlerOffset));
  __ addp(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rdx);

  // Compatible receiver check failed: pop return address, arguments and
  // receiver and throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  __ PopReturnAddressTo(rbx);
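  // Compute the byte size of the arguments plus the receiver:
  // rax * kPointerSize + 1 * kPointerSize.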
  __ leap(rax, Operand(rax, times_pointer_size, 1 * kPointerSize));
  __ addp(rsp, rax);
  __ PushReturnAddressFrom(rbx);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
  }
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmpp(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
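  // (The deoptimization data is a FixedArray, so the pc offset is read as a
  // plain element and untagged from its Smi representation.)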

  // Compute the target address = code_obj + header_size + osr_offset.
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64