// (code-viewer navigation residue removed: "Home | History | Annotate | Download | only in x87")
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #if V8_TARGET_ARCH_X87
      6 
      7 #include "src/code-factory.h"
      8 #include "src/codegen.h"
      9 #include "src/deoptimizer.h"
     10 #include "src/full-codegen/full-codegen.h"
     11 #include "src/x87/frames-x87.h"
     12 
     13 namespace v8 {
     14 namespace internal {
     15 
     16 #define __ ACCESS_MASM(masm)
     17 
// Adaptor that tail-calls the C++ builtin at |address|. It inserts the three
// extra arguments (smi-tagged argc, target, new.target) that CEntry-based
// builtins expect below the return address, then jumps to the external
// reference, entering either a normal EXIT or a BUILTIN_EXIT frame.
void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- eax                : number of arguments excluding receiver
  //  -- edi                : target
  //  -- edx                : new.target
  //  -- esp[0]             : return address
  //  -- esp[4]             : last argument
  //  -- ...
  //  -- esp[4 * argc]      : first argument
  //  -- esp[4 * (argc +1)] : receiver
  // -----------------------------------
  __ AssertFunction(edi);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // JumpToExternalReference expects eax to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ add(eax, Immediate(num_extra_args + 1));

  // Insert extra arguments. The return address is temporarily popped into ecx
  // so the three extras end up below it on the stack. Note eax is smi-tagged
  // only for the duration of its own push; the live eax stays untagged.
  __ PopReturnAddressTo(ecx);
  __ SmiTag(eax);
  __ Push(eax);
  __ SmiUntag(eax);
  __ Push(edi);
  __ Push(edx);
  __ PushReturnAddressFrom(ecx);

  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}
     55 
// Calls the runtime function |function_id| with the target function as its
// single argument, then tail-calls the Code object the runtime returned.
// eax/edx/edi are preserved across the runtime call for the callee.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- eax : argument count (preserved for callee)
  //  -- edx : new target (preserved for callee)
  //  -- edi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ SmiTag(eax);
    __ push(eax);
    // Push a copy of the target function and the new target.
    __ push(edi);
    __ push(edx);
    // Function is also the parameter to the runtime call.
    __ push(edi);

    __ CallRuntime(function_id, 1);
    // The runtime returns the Code object in eax; stash it in ebx so eax can
    // be restored to the argument count below.
    __ mov(ebx, eax);

    // Restore target function and new target.
    __ pop(edx);
    __ pop(edi);
    __ pop(eax);
    __ SmiUntag(eax);
  }

  // Skip the Code object header and jump to the first instruction.
  __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
  __ jmp(ebx);
}
     87 
// Tail-calls the code currently installed on the target function's
// SharedFunctionInfo (edi holds the JSFunction).
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kCodeOffset));
  // Skip the Code object header and jump to the first instruction.
  __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
  __ jmp(ebx);
}
     94 
// Entry point for functions whose optimized code is being produced in the
// background: opportunistically try to install the optimized code, otherwise
// fall back to the shared (unoptimized) code.
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(masm->isolate());
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  __ j(above_equal, &ok, Label::kNear);

  // Near/under the stack limit: take the slow path that may install
  // freshly-optimized code and tail-call it.
  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
    112 
    113 namespace {
    114 
// Shared body for the JSConstructStub builtins.
//  - is_api_function: constructor is an API function (only affects whether the
//    construct-stub deopt PC offset is recorded).
//  - create_implicit_receiver: allocate the receiver via FastNewObject and, if
//    the constructor returns a non-object, use that receiver as the result.
//  - check_derived_construct: after leaving the frame, throw if the (derived)
//    constructor returned a Smi (ES6 9.2.2, step 13+).
void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
                                    bool create_implicit_receiver,
                                    bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- eax: number of arguments
  //  -- esi: context
  //  -- edi: constructor function
  //  -- edx: new target
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(eax);
    __ push(esi);
    __ push(eax);

    if (create_implicit_receiver) {
      // Allocate the new receiver object. edi/edx are caller-saved across the
      // FastNewObject call, so preserve them on the stack.
      __ Push(edi);
      __ Push(edx);
      __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
              RelocInfo::CODE_TARGET);
      __ mov(ebx, eax);
      __ Pop(edx);
      __ Pop(edi);

      // ----------- S t a t e -------------
      //  -- edi: constructor function
      //  -- ebx: newly allocated object
      //  -- edx: new target
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ mov(eax, Operand(esp, 0));
    }

    __ SmiUntag(eax);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ push(ebx);
      __ push(ebx);
    } else {
      // No implicit receiver: the hole marks "receiver not created".
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack, counting ecx down
    // from argc to 0 so arguments end up in the original order.
    Label loop, entry;
    __ mov(ecx, eax);
    __ jmp(&entry);
    __ bind(&loop);
    __ push(Operand(ebx, ecx, times_4, 0));
    __ bind(&entry);
    __ dec(ecx);
    __ j(greater_equal, &loop);

    // Call the function.
    ParameterCount actual(eax);
    __ InvokeFunction(edi, edx, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      __ JumpIfSmi(eax, &use_receiver, Label::kNear);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
      __ j(above_equal, &exit, Label::kNear);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ mov(eax, Operand(esp, 0));

      // Restore the arguments count and leave the construct frame. The
      // arguments count is stored below the receiver.
      __ bind(&exit);
      __ mov(ebx, Operand(esp, 1 * kPointerSize));
    } else {
      // No receiver copy on the stack; argc is at the top.
      __ mov(ebx, Operand(esp, 0));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(eax, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  // Remove caller arguments from the stack and return. ebx holds the
  // smi-tagged argc, so scaling by times_2 yields argc * kPointerSize.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ pop(ecx);
  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
  __ push(ecx);
  if (create_implicit_receiver) {
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
  }
  __ ret(0);
}
    245 
    246 }  // namespace
    247 
    248 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
    249   Generate_JSConstructStubHelper(masm, false, true, false);
    250 }
    251 
    252 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
    253   Generate_JSConstructStubHelper(masm, true, false, false);
    254 }
    255 
    256 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
    257   Generate_JSConstructStubHelper(masm, false, false, false);
    258 }
    259 
    260 void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    261     MacroAssembler* masm) {
    262   Generate_JSConstructStubHelper(masm, false, false, true);
    263 }
    264 
// Throws a TypeError when something non-constructable (in edi) was used with
// `new`. Never returns; the runtime call throws.
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ push(edi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
    270 
// Whether eax holds a smi-tagged or an untagged item count when checking for
// stack overflow below.
enum IsTagged { kEaxIsSmiTagged, kEaxIsUntaggedInt };

// Clobbers ecx, edx, edi; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        IsTagged eax_is_tagged) {
  // eax   : the number of items to be pushed to the stack
  //
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  ExternalReference real_stack_limit =
      ExternalReference::address_of_real_stack_limit(masm->isolate());
  __ mov(edi, Operand::StaticVariable(real_stack_limit));
  // Make ecx the space we have left. The stack might already be overflowed
  // here which will cause ecx to become negative.
  __ mov(ecx, esp);
  __ sub(ecx, edi);
  // Make edx the space we need for the array when it is unrolled onto the
  // stack.
  __ mov(edx, eax);
  // A smi-tagged count is already shifted left by kSmiTagSize, so shift by
  // that much less to get bytes.
  int smi_tag = eax_is_tagged == kEaxIsSmiTagged ? kSmiTagSize : 0;
  __ shl(edx, kPointerSizeLog2 - smi_tag);
  // Check if the arguments will overflow the stack.
  __ cmp(ecx, edx);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
    303 
// Trampoline used when entering JS from C++ via JSEntryStub: unpacks the C
// arguments from the caller (entry) frame, copies them onto the JS stack and
// invokes either Call or Construct depending on |is_construct|.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(esi, Operand::StaticVariable(context_address));

    // Load the previous frame pointer (ebx) to access C arguments
    __ mov(ebx, Operand(ebp, 0));

    // Push the function and the receiver onto the stack.
    __ push(Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
    __ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset));

    // Load the number of arguments and setup pointer to the arguments.
    __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
    __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));

    // Check if we have enough stack space to push all arguments.
    // Expects argument count in eax. Clobbers ecx, edx, edi.
    Generate_CheckStackOverflow(masm, kEaxIsUntaggedInt);

    // Copy arguments to the stack in a loop. argv holds handles, so each
    // parameter is dereferenced once before being pushed.
    Label loop, entry;
    __ Move(ecx, Immediate(0));
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ mov(edx, Operand(ebx, ecx, times_4, 0));  // push parameter from argv
    __ push(Operand(edx, 0));                    // dereference handle
    __ inc(ecx);
    __ bind(&entry);
    __ cmp(ecx, eax);
    __ j(not_equal, &loop);

    // Load the previous frame pointer (ebx) to access C arguments
    // (ebx was clobbered by the copy loop above).
    __ mov(ebx, Operand(ebp, 0));

    // Get the new.target and function from the frame.
    __ mov(edx, Operand(ebx, EntryFrameConstants::kNewTargetArgOffset));
    __ mov(edi, Operand(ebx, EntryFrameConstants::kFunctionArgOffset));

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }
  __ ret(kPointerSize);  // Remove receiver.
}
    362 
// Entry trampoline for ordinary calls from C++ into JS.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, /* is_construct */ false);
}
    366 
// Entry trampoline for construct calls from C++ into JS.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, /* is_construct */ true);
}
    370 
    371 // static
// static
// Resumes a suspended JSGeneratorObject: stores the input value and resume
// mode on the generator, pads the stack with hole values for the formal
// parameters, and jumps into the generator function's code with the generator
// object passed as new.target. Handles debugger step-in preparation.
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : the value to pass to the generator
  //  -- ebx    : the JSGeneratorObject to resume
  //  -- edx    : the resume mode (tagged)
  //  -- esp[0] : return address
  // -----------------------------------
  __ AssertGeneratorObject(ebx);

  // Store input value into generator object. The write barrier is needed
  // because eax may be a heap object.
  __ mov(FieldOperand(ebx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
  __ RecordWriteField(ebx, JSGeneratorObject::kInputOrDebugPosOffset, eax, ecx,
                      kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ mov(FieldOperand(ebx, JSGeneratorObject::kResumeModeOffset), edx);

  // Load suspended function and context.
  __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ cmpb(Operand::StaticVariable(debug_hook), Immediate(0));
  __ j(not_equal, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ cmp(ebx, Operand::StaticVariable(debug_suspended_generator));
  __ j(equal, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Pop return address.
  __ PopReturnAddressTo(eax);

  // Push receiver.
  __ Push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));

  // ----------- S t a t e -------------
  //  -- eax    : return address
  //  -- ebx    : the JSGeneratorObject to resume
  //  -- edx    : the resume mode (tagged)
  //  -- edi    : generator function
  //  -- esi    : generator context
  //  -- esp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx,
         FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    // ecx counts down (as a smi) from the formal parameter count; the carry
    // flag from the subtraction signals we went below zero.
    Label done_loop, loop;
    __ bind(&loop);
    __ sub(ecx, Immediate(Smi::FromInt(1)));
    __ j(carry, &done_loop, Label::kNear);
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
    __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, ecx);
    __ Assert(equal, kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ PushReturnAddressFrom(eax);
    __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ mov(eax,
           FieldOperand(eax, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ mov(edx, ebx);
    __ jmp(FieldOperand(edi, JSFunction::kCodeEntryOffset));
  }

  // Slow path: debugger hook is set — notify it, then retry from the top of
  // the resume sequence. edi must be reloaded since the runtime call may have
  // moved objects.
  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(ebx);
    __ Push(edx);
    __ Push(edi);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(edx);
    __ Pop(ebx);
    __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  // Slow path: this generator is the one the debugger wants to step into.
  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(ebx);
    __ Push(edx);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(edx);
    __ Pop(ebx);
    __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);
}
    486 
// Tears down the interpreter frame and drops the receiver plus arguments from
// the caller's stack, keeping the return address on top. Clobbers both
// scratch registers.
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  Register args_count = scratch1;
  Register return_pc = scratch2;

  // Get the arguments + receiver count (in bytes) from the bytecode array's
  // parameter size field.
  __ mov(args_count,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(args_count,
         FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments.
  __ pop(return_pc);
  __ add(esp, args_count);
  __ push(return_pc);
}
    506 
    507 // Generate code for entering a JS function with the interpreter.
    508 // On entry to the function the receiver and arguments have been pushed on the
    509 // stack left to right.  The actual argument count matches the formal parameter
    510 // count expected by the function.
    511 //
    512 // The live registers are:
    513 //   o edi: the JS function object being called
    514 //   o edx: the new target
    515 //   o esi: our context
    516 //   o ebp: the caller's frame pointer
    517 //   o esp: stack pointer (pointing to return address)
    518 //
    519 // The function builds an interpreter frame.  See InterpreterFrameConstants in
    520 // frames.h for its layout.
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called
//   o edx: the new target
//   o esi: our context
//   o ebp: the caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS function.
  __ push(edx);  // Callee's new target.

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  // A non-smi DebugInfo slot means a DebugInfo object is attached.
  __ JumpIfNotSmi(FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset),
                  &load_debug_bytecode_array);
  __ mov(kInterpreterBytecodeArrayRegister,
         FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check whether we should continue to use the interpreter.
  Label switch_to_different_code_kind;
  __ Move(ecx, masm->CodeObject());  // Self-reference to this code.
  __ cmp(ecx, FieldOperand(eax, SharedFunctionInfo::kCodeOffset));
  __ j(not_equal, &switch_to_different_code_kind);

  // Increment invocation count for the function.
  __ EmitLoadFeedbackVector(ecx);
  __ add(
      FieldOperand(ecx, FeedbackVector::kInvocationCountIndex * kPointerSize +
                            FeedbackVector::kHeaderSize),
      Immediate(Smi::FromInt(1)));

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     eax);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset code age.
  __ mov_b(FieldOperand(kInterpreterBytecodeArrayRegister,
                        BytecodeArray::kBytecodeAgeOffset),
           Immediate(BytecodeArray::kNoAgeBytecodeAge));

  // Push bytecode array.
  __ push(kInterpreterBytecodeArrayRegister);
  // Push Smi tagged initial bytecode array offset.
  __ push(Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag)));

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ mov(ebx, FieldOperand(kInterpreterBytecodeArrayRegister,
                             BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ mov(ecx, esp);
    __ sub(ecx, ebx);
    ExternalReference stack_limit =
        ExternalReference::address_of_real_stack_limit(masm->isolate());
    __ cmp(ecx, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ mov(eax, Immediate(masm->isolate()->factory()->undefined_value()));
    __ jmp(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(eax);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ sub(ebx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header);
  }

  // Load accumulator, bytecode offset and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ mov(kInterpreterDispatchTableRegister,
         Immediate(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function: load the first
  // bytecode, index the dispatch table with it and call the handler.
  __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
                      times_pointer_size, 0));
  __ call(ebx);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in eax.
  LeaveInterpreterFrame(masm, ebx, ecx);
  __ ret(0);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  Register debug_info = kInterpreterBytecodeArrayRegister;
  __ mov(debug_info, FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset));
  __ mov(kInterpreterBytecodeArrayRegister,
         FieldOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
  __ jmp(&bytecode_array_loaded);

  // If the shared code is no longer this entry trampoline, then the underlying
  // function has been switched to a different kind of code and we heal the
  // closure by switching the code entry field over to the new code as well.
  __ bind(&switch_to_different_code_kind);
  __ pop(edx);  // Callee's new target.
  __ pop(edi);  // Callee's JS function.
  __ pop(esi);  // Callee's context.
  __ leave();   // Leave the frame so we can tail call.
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kCodeOffset));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ mov(FieldOperand(edi, JSFunction::kCodeEntryOffset), ecx);
  __ RecordWriteCodeEntryField(edi, ecx, ebx);
  __ jmp(ecx);
}
    649 
// Jumps to |stack_overflow| if pushing |num_args| (optionally plus one for
// the receiver) pointers would cross the real stack limit. Clobbers both
// scratch registers; num_args is preserved.
static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch1, Register scratch2,
                                        Label* stack_overflow,
                                        bool include_receiver = false) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  ExternalReference real_stack_limit =
      ExternalReference::address_of_real_stack_limit(masm->isolate());
  __ mov(scratch1, Operand::StaticVariable(real_stack_limit));
  // Make scratch2 the space we have left. The stack might already be overflowed
  // here which will cause scratch2 to become negative.
  __ mov(scratch2, esp);
  __ sub(scratch2, scratch1);
  // Make scratch1 the space we need for the array when it is unrolled onto the
  // stack.
  __ mov(scratch1, num_args);
  if (include_receiver) {
    __ add(scratch1, Immediate(1));
  }
  __ shl(scratch1, kPointerSizeLog2);
  // Check if the arguments will overflow the stack.
  __ cmp(scratch2, scratch1);
  __ j(less_equal, stack_overflow);  // Signed comparison.
}
    675 
// Pushes the arguments between |start_address| (inclusive) and |array_limit|
// (exclusive) onto the stack, walking downward in memory so they end up in
// call order. Clobbers start_address.
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register array_limit,
                                         Register start_address) {
  // ----------- S t a t e -------------
  //  -- start_address : Pointer to the last argument in the args array.
  //  -- array_limit : Pointer to one before the first argument in the
  //                   args array.
  // -----------------------------------
  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
  __ Push(Operand(start_address, 0));
  __ sub(start_address, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmp(start_address, array_limit);
  __ j(greater, &loop_header, Label::kNear);
}
    693 
// static
// Pushes the interpreter's argument array onto the machine stack and
// tail-calls the appropriate Call builtin (or the function-specific /
// spread variant, depending on |mode|).
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- ebx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- edi : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;
  // Compute the expected number of arguments (including the receiver).
  __ mov(ecx, eax);
  __ add(ecx, Immediate(1));  // Add one for receiver.

  // Add a stack check before pushing the arguments. We need an extra register
  // to perform a stack check. So push it onto the stack temporarily. This
  // might cause stack overflow, but it will be detected by the check.
  __ Push(edi);
  Generate_StackOverflowCheck(masm, ecx, edx, edi, &stack_overflow);
  __ Pop(edi);

  // Pop return address to allow tail-call after pushing arguments.
  __ Pop(edx);

  // Find the address of the last argument.
  // ecx = ebx - (argc + 1) * kPointerSize, i.e. one slot below the lowest
  // argument; this is the walk limit for the push loop below.
  __ shl(ecx, kPointerSizeLog2);
  __ neg(ecx);
  __ add(ecx, ebx);
  Generate_InterpreterPushArgs(masm, ecx, ebx);

  // Call the target.
  __ Push(edx);  // Re-push return address.

  // Dispatch on the push mode; eax still holds the argument count and edi the
  // target, as expected by the Call* builtins.
  if (mode == InterpreterPushArgsMode::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(masm->isolate()->builtins()->CallWithSpread(),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    // Pop the temporary registers, so that return address is on top of stack.
    __ Pop(edi);

    __ TailCallRuntime(Runtime::kThrowStackOverflow);

    // This should be unreachable.
    __ int3();
  }
}
    753 
namespace {

// This function modifies start_addr, and only reads the contents of num_args
// register. scratch1 and scratch2 are used as temporary registers and are
// clobbered here; callers that need their values push them above the return
// address beforehand (those pushed slots are what num_slots_above_ret_addr
// counts) and pop them afterwards.
void Generate_InterpreterPushArgsAndReturnAddress(
    MacroAssembler* masm, Register num_args, Register start_addr,
    Register scratch1, Register scratch2, bool receiver_in_args,
    int num_slots_above_ret_addr, Label* stack_overflow) {
  // We have to move return address and the temporary registers above it
  // before we can copy arguments onto the stack. To achieve this:
  // Step 1: Increment the stack pointer by num_args + 1 (for receiver).
  // Step 2: Move the return address and values above it to the top of stack.
  // Step 3: Copy the arguments into the correct locations.
  //  current stack    =====>    required stack layout
  // |             |            | scratch1      | (2) <-- esp(1)
  // |             |            | ....          | (2)
  // |             |            | scratch-n     | (2)
  // |             |            | return addr   | (2)
  // |             |            | arg N         | (3)
  // | scratch1    | <-- esp    | ....          |
  // | ....        |            | arg 1         |
  // | scratch-n   |            | arg 0         |
  // | return addr |            | receiver slot |

  // Check for stack overflow before we increment the stack pointer.
  Generate_StackOverflowCheck(masm, num_args, scratch1, scratch2,
                              stack_overflow, true);

// Step 1 - Update the stack pointer. scratch1 already contains the required
// increment to the stack. i.e. num_args + 1 stack slots. This is computed in
// the Generate_StackOverflowCheck.

#ifdef _MSC_VER
  // TODO(mythria): Move it to macro assembler.
  // In windows, we cannot increment the stack size by more than one page
  // (minimum page size is 4KB) without accessing at least one byte on the
  // page. Check this:
  // https://msdn.microsoft.com/en-us/library/aa227153(v=vs.60).aspx.
  const int page_size = 4 * 1024;
  Label check_offset, update_stack_pointer;
  __ bind(&check_offset);
  __ cmp(scratch1, page_size);
  __ j(less, &update_stack_pointer);
  __ sub(esp, Immediate(page_size));
  // Just to touch the page, before we increment further.
  __ mov(Operand(esp, 0), Immediate(0));
  __ sub(scratch1, Immediate(page_size));
  __ jmp(&check_offset);
  __ bind(&update_stack_pointer);
#endif

  __ sub(esp, scratch1);

  // Step 2 move return_address and slots above it to the correct locations.
  // Move from top to bottom, otherwise we may overwrite when num_args = 0 or 1,
  // basically when the source and destination overlap. We at least need one
  // extra slot for receiver, so no extra checks are required to avoid copy.
  // Copies num_slots_above_ret_addr caller-saved slots plus the return
  // address itself (hence the +1).
  for (int i = 0; i < num_slots_above_ret_addr + 1; i++) {
    __ mov(scratch1,
           Operand(esp, num_args, times_pointer_size, (i + 1) * kPointerSize));
    __ mov(Operand(esp, i * kPointerSize), scratch1);
  }

  // Step 3 copy arguments to correct locations.
  if (receiver_in_args) {
    // The args array already contains the receiver, so copy num_args + 1
    // slots in the loop below.
    __ mov(scratch1, num_args);
    __ add(scratch1, Immediate(1));
  } else {
    // Slot meant for receiver contains return address. Reset it so that
    // we will not incorrectly interpret return address as an object.
    __ mov(Operand(esp, num_args, times_pointer_size,
                   (num_slots_above_ret_addr + 1) * kPointerSize),
           Immediate(0));
    __ mov(scratch1, num_args);
  }

  // Copy the args from the unrolled array into their final stack slots,
  // counting scratch1 down from the number of slots to fill.
  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
  __ mov(scratch2, Operand(start_addr, 0));
  __ mov(Operand(esp, scratch1, times_pointer_size,
                 num_slots_above_ret_addr * kPointerSize),
         scratch2);
  __ sub(start_addr, Immediate(kPointerSize));
  __ sub(scratch1, Immediate(1));
  __ bind(&loop_check);
  __ cmp(scratch1, Immediate(0));
  __ j(greater, &loop_header, Label::kNear);
}

}  // end anonymous namespace
    846 
// static
// Pushes the interpreter's argument array and then tail-calls the correct
// construct path: the function-specific construct stub, ConstructWithSpread,
// or the generic Construct builtin, depending on |mode|.
void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target
  //  -- edi : the constructor
  //  -- ebx : allocation site feedback (if available or undefined)
  //  -- ecx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------
  Label stack_overflow;
  // We need two scratch registers. Push edi and edx onto stack.
  __ Push(edi);
  __ Push(edx);

  // Push arguments and move return address to the top of stack.
  // The eax register is readonly. The ecx register will be modified. The edx
  // and edi registers will be modified but restored to their original values
  // (their pushed copies survive above the return address; 2 below is the
  // number of such slots).
  Generate_InterpreterPushArgsAndReturnAddress(masm, eax, ecx, edx, edi, false,
                                               2, &stack_overflow);

  // Restore edi and edx
  __ Pop(edx);
  __ Pop(edi);

  __ AssertUndefinedOrAllocationSite(ebx);
  if (mode == InterpreterPushArgsMode::kJSFunction) {
    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ AssertFunction(edi);

    // Load the construct stub out of the SharedFunctionInfo and jump to its
    // first instruction (past the Code object header).
    __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
    __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
    __ jmp(ecx);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor with unmodified eax, edi, edx values.
    __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor with unmodified eax, edi, edx values.
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    // Pop the temporary registers, so that return address is on top of stack.
    __ Pop(edx);
    __ Pop(edi);

    __ TailCallRuntime(Runtime::kThrowStackOverflow);

    // This should be unreachable.
    __ int3();
  }
}
    906 
// static
// Pushes the interpreter's argument array (receiver included) and tail-calls
// the ArrayConstructorStub for a call to the Array function.
void Builtins::Generate_InterpreterPushArgsAndConstructArray(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the target to call checked to be Array function.
  //  -- ebx : the allocation site feedback
  //  -- ecx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------
  Label stack_overflow;
  // We need two scratch registers. Register edi is available, push edx onto
  // stack.
  __ Push(edx);

  // Push arguments and move return address to the top of stack.
  // The eax register is readonly. The ecx register will be modified. edx is
  // restored from its pushed copy below; edi is clobbered as a scratch but
  // is dead here and is reloaded from edx right after.
  Generate_InterpreterPushArgsAndReturnAddress(masm, eax, ecx, edx, edi, true,
                                               1, &stack_overflow);

  // Restore edx.
  __ Pop(edx);

  // Array constructor expects constructor in edi. It is same as edx here.
  __ Move(edi, edx);

  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

  __ bind(&stack_overflow);
  {
    // Pop the temporary registers, so that return address is on top of stack.
    __ Pop(edx);

    __ TailCallRuntime(Runtime::kThrowStackOverflow);

    // This should be unreachable.
    __ int3();
  }
}
    949 
// (Re-)enters the interpreter dispatch loop for the current interpreted
// frame: fakes a return address into the InterpreterEntryTrampoline, then
// loads the dispatch table, bytecode array, and bytecode offset from the
// frame and jumps to the handler for the current bytecode.
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
  __ LoadHeapObject(ebx,
                    masm->isolate()->builtins()->InterpreterEntryTrampoline());
  // ebx = trampoline start + recorded pc offset (skipping the Code header).
  __ add(ebx, Immediate(interpreter_entry_return_pc_offset->value() +
                        Code::kHeaderSize - kHeapObjectTag));
  __ push(ebx);

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Immediate(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     ebx);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  // Load the current bytecode, index into the dispatch table, and jump.
  __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
                      times_pointer_size, 0));
  __ jmp(ebx);
}
    991 
void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Advance the current bytecode offset stored within the given interpreter
  // stack frame. This simulates what all bytecode handlers do upon completion
  // of the underlying operation.
  __ mov(ebx, Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(edx, Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve the accumulator across the runtime call.
    __ Push(kInterpreterAccumulatorRegister);
    __ Push(ebx);  // First argument is the bytecode array.
    __ Push(edx);  // Second argument is the bytecode offset.
    __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
    __ Move(edx, eax);  // Result is the new bytecode offset.
    __ Pop(kInterpreterAccumulatorRegister);
  }
  // Store the advanced offset back into the frame, then re-enter dispatch.
  __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp), edx);

  Generate_InterpreterEnterBytecode(masm);
}
   1012 
// Re-enters bytecode dispatch at the bytecode offset currently stored in the
// interpreter frame, without advancing the offset first.
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}
   1016 
// Lazy-compilation entry: before calling the runtime compiler, try to reuse
// code that already exists — first an entry in the SharedFunctionInfo's
// optimized code map for the current native context, then the SFI's own code
// (if it is not the CompileLazy stub itself). Only if both fail does it tail
// call Runtime::kCompileLazy.
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argument count (preserved for callee)
  //  -- edx : new target (preserved for callee)
  //  -- edi : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime, gotta_call_runtime_no_stack;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = edi;
  Register new_target = edx;
  Register argument_count = eax;

  // Do we have a valid feedback vector?
  __ mov(ebx, FieldOperand(closure, JSFunction::kFeedbackVectorOffset));
  __ mov(ebx, FieldOperand(ebx, Cell::kValueOffset));
  __ cmp(ebx, masm->isolate()->factory()->undefined_value());
  __ j(equal, &gotta_call_runtime_no_stack);

  // Save the incoming registers; eax/edx/edi are reused as scratch below and
  // must be intact when we eventually enter the callee.
  __ push(argument_count);
  __ push(new_target);
  __ push(closure);

  // NOTE: map aliases argument_count (eax) — safe because eax was saved above.
  Register map = argument_count;
  Register index = ebx;
  __ mov(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ mov(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ mov(index, FieldOperand(map, FixedArray::kLengthOffset));
  // A map with fewer than 2 elements has no entries; fall back to the SFI.
  __ cmp(index, Immediate(Smi::FromInt(2)));
  __ j(less, &try_shared);

  // edx : native context
  // ebx : length / index
  // eax : optimized code map
  // stack[0] : new target
  // stack[4] : closure
  Register native_context = edx;
  __ mov(native_context, NativeContextOperand());

  // Walk the optimized code map from the last entry downwards, looking for
  // an entry whose context matches the current native context.
  __ bind(&loop_top);
  // NOTE: temp aliases closure (edi), which was saved on the stack above.
  Register temp = edi;

  // Does the native context match?
  __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
                            SharedFunctionInfo::kOffsetToPreviousContext));
  __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
  __ cmp(temp, native_context);
  __ j(not_equal, &loop_bottom);
  // Code available?
  Register entry = ecx;
  __ mov(entry, FieldOperand(map, index, times_half_pointer_size,
                             SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
  // A cleared WeakCell reads as a Smi; treat that as "no cached code".
  __ JumpIfSmi(entry, &try_shared);

  // Found code. Get it into the closure and return.
  __ pop(closure);
  // Store code entry in the closure.
  __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
  __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, eax);

  // Link the closure into the optimized function list.
  // ecx : code entry
  // edx : native context
  // edi : closure
  __ mov(ebx,
         ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ mov(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), ebx);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, ebx, eax,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ mov(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
         closure);
  // Save closure before the write barrier.
  __ mov(ebx, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, eax,
                            kDontSaveFPRegs);
  __ mov(closure, ebx);
  __ pop(new_target);
  __ pop(argument_count);
  // Enter the found optimized code.
  __ jmp(entry);

  __ bind(&loop_bottom);
  __ sub(index, Immediate(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
  __ cmp(index, Immediate(Smi::FromInt(1)));
  __ j(greater, &loop_top);

  // We found no code.
  __ jmp(&gotta_call_runtime);

  __ bind(&try_shared);
  __ pop(closure);
  __ pop(new_target);
  __ pop(argument_count);
  __ mov(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  // Is the shared function marked for tier up?
  __ test_b(FieldOperand(entry, SharedFunctionInfo::kMarkedForTierUpByteOffset),
            Immediate(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
  __ j(not_zero, &gotta_call_runtime_no_stack);

  // If SFI points to anything other than CompileLazy, install that.
  __ mov(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ Move(ebx, masm->CodeObject());
  __ cmp(entry, ebx);
  __ j(equal, &gotta_call_runtime_no_stack);

  // Install the SFI's code entry.
  __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
  __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, ebx);
  __ jmp(entry);

  __ bind(&gotta_call_runtime);
  __ pop(closure);
  __ pop(new_target);
  __ pop(argument_count);
  __ bind(&gotta_call_runtime_no_stack);

  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
   1141 
// Requests baseline compilation via the kCompileBaseline runtime entry and
// tail-calls the code it returns (see GenerateTailCallToReturnedCode).
void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}
   1145 
// Requests non-concurrent optimized compilation via the runtime and
// tail-calls the code it returns.
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}
   1150 
// Requests concurrent (background) optimized compilation via the runtime and
// tail-calls the code it returns.
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
   1154 
// Tries to instantiate an asm.js module via Runtime::kInstantiateAsmJs; on
// success returns the result directly to the caller's caller, on failure
// falls back to ordinary lazy JS compilation.
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argument count (preserved for callee)
  //  -- edx : new target (preserved for callee)
  //  -- edi : target function (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argument count for later compare.
    __ mov(ecx, eax);
    // Push the number of arguments to the callee.
    __ SmiTag(eax);
    __ push(eax);
    // Push a copy of the target function and the new target.
    __ push(edi);
    __ push(edx);

    // The function.
    __ push(edi);
    // Copy arguments from caller (stdlib, foreign, heap).
    // For each possible argc j in 0..3, push the caller's j arguments and
    // pad with undefined up to the 3 the runtime call expects.
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmp(ecx, Immediate(j));
        __ j(not_equal, &over, Label::kNear);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ Push(Operand(
            ebp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(Heap::kUndefinedValueRootIndex);
      }
      if (j < 3) {
        __ jmp(&args_done, Label::kNear);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call runtime, on success unwind frame, and parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(eax, &failed, Label::kNear);

    // Success: discard the saved new target and function copies, recover the
    // (smi-tagged) argument count, and leave the internal frame.
    __ Drop(2);
    __ Pop(ecx);
    __ SmiUntag(ecx);
    scope.GenerateLeaveFrame();

    // Remove the caller's arguments (ecx args + receiver) and return.
    __ PopReturnAddressTo(ebx);
    __ inc(ecx);
    __ lea(esp, Operand(esp, ecx, times_pointer_size, 0));
    __ PushReturnAddressFrom(ebx);
    __ ret(0);

    __ bind(&failed);
    // Restore target function and new target.
    __ pop(edx);
    __ pop(edi);
    __ pop(eax);
    __ SmiUntag(eax);
  }
  // On failure, tail call back to regular js.
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
   1223 
// Shared body of the Make*CodeYoungAgain builtins: calls the C function that
// resets the code's age, then resumes execution of the (now young) code.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  // Rewind the return address by 5 bytes (the length of the call instruction
  // that got us here — cf. Assembler::kCallInstructionLength).
  __ sub(Operand(esp, 0), Immediate(5));
  __ pushad();
  // pushad saved 8 GP registers, so the return address (which doubles as the
  // address of the code being aged) now sits 8 slots up.
  __ mov(eax, Operand(esp, 8 * kPointerSize));
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2, ebx);
    __ mov(Operand(esp, 1 * kPointerSize),
           Immediate(ExternalReference::isolate_address(masm->isolate())));
    __ mov(Operand(esp, 0), eax);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ popad();
  __ ret(0);
}
   1248 
// Stamp out Builtins::Generate_Make<Age>CodeYoungAgain for every code age in
// CODE_AGE_LIST; they all share GenerateMakeCodeYoungAgainCommon above.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                              \
  void Builtins::Generate_Make##C##CodeYoungAgain(MacroAssembler* masm) { \
    GenerateMakeCodeYoungAgainCommon(masm);                               \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
   1255 
// Marks the calling code object as executed via a C call, then performs the
// standard frame prologue on its behalf and resumes it just past the
// code-age stub.
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ pushad();
  // Return address sits above the 8 registers saved by pushad; back it up by
  // one call-instruction length to get the start of the aged sequence.
  __ mov(eax, Operand(esp, 8 * kPointerSize));
  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2, ebx);
    __ mov(Operand(esp, 1 * kPointerSize),
           Immediate(ExternalReference::isolate_address(masm->isolate())));
    __ mov(Operand(esp, 0), eax);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ popad();

  // Perform prologue operations usually performed by the young code stub.
  __ pop(eax);   // Pop return address into scratch register.
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.
  __ push(eax);  // Push return address after frame prologue.

  // Jump to point after the code-age stub.
  __ ret(0);
}
   1287 
// Second execution marking shares the make-code-young path.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
   1291 
// "To be executed once" uses the same sequence as "executed once".
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}
   1295 
// Shared body of the NotifyStubFailure builtins: notifies the runtime of a
// stub failure while preserving all registers, then returns to the IC miss
// continuation left on the stack.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ popad();
    // Tear down internal frame.
  }

  // Discard the state word by popping the return address down over it.
  __ pop(MemOperand(esp, 0));  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}
   1314 
// Stub-failure notification without saving FP registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
   1318 
// Stub-failure notification that also saves FP registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
   1322 
// Shared body of the NotifyDeoptimized builtins: informs the runtime of the
// deoptimization, then resumes the full-codegen continuation according to
// the bailout state word the deoptimizer left on the stack.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass deoptimization type to the runtime system.
    __ push(Immediate(Smi::FromInt(static_cast<int>(type))));
    __ CallRuntime(Runtime::kNotifyDeoptimized);

    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ SmiUntag(ecx);

  // Switch on the state.
  Label not_no_registers, not_tos_eax;
  __ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  // TOS_REGISTER: reload the accumulator (eax) saved below the state word.
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  __ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER));
  __ j(not_equal, &not_tos_eax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, eax.

  __ bind(&not_tos_eax);
  __ Abort(kNoCasesLeft);
}
   1355 
// Notification for an eager deoptimization.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
   1359 
// Notification for a soft deoptimization.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
   1363 
// Notification for a lazy deoptimization.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
   1367 
   1368 // static
   1369 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   1370   // ----------- S t a t e -------------
   1371   //  -- eax     : argc
   1372   //  -- esp[0]  : return address
   1373   //  -- esp[4]  : argArray
   1374   //  -- esp[8]  : thisArg
   1375   //  -- esp[12] : receiver
   1376   // -----------------------------------
   1377 
   1378   // 1. Load receiver into edi, argArray into eax (if present), remove all
   1379   // arguments from the stack (including the receiver), and push thisArg (if
   1380   // present) instead.
   1381   {
   1382     Label no_arg_array, no_this_arg;
   1383     __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
   1384     __ mov(ebx, edx);
   1385     __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
   1386     __ test(eax, eax);
   1387     __ j(zero, &no_this_arg, Label::kNear);
   1388     {
   1389       __ mov(edx, Operand(esp, eax, times_pointer_size, 0));
   1390       __ cmp(eax, Immediate(1));
   1391       __ j(equal, &no_arg_array, Label::kNear);
   1392       __ mov(ebx, Operand(esp, eax, times_pointer_size, -kPointerSize));
   1393       __ bind(&no_arg_array);
   1394     }
   1395     __ bind(&no_this_arg);
   1396     __ PopReturnAddressTo(ecx);
   1397     __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
   1398     __ Push(edx);
   1399     __ PushReturnAddressFrom(ecx);
   1400     __ Move(eax, ebx);
   1401   }
   1402 
   1403   // ----------- S t a t e -------------
   1404   //  -- eax    : argArray
   1405   //  -- edi    : receiver
   1406   //  -- esp[0] : return address
   1407   //  -- esp[4] : thisArg
   1408   // -----------------------------------
   1409 
   1410   // 2. Make sure the receiver is actually callable.
   1411   Label receiver_not_callable;
   1412   __ JumpIfSmi(edi, &receiver_not_callable, Label::kNear);
   1413   __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
   1414   __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
   1415             Immediate(1 << Map::kIsCallable));
   1416   __ j(zero, &receiver_not_callable, Label::kNear);
   1417 
   1418   // 3. Tail call with no arguments if argArray is null or undefined.
   1419   Label no_arguments;
   1420   __ JumpIfRoot(eax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
   1421   __ JumpIfRoot(eax, Heap::kUndefinedValueRootIndex, &no_arguments,
   1422                 Label::kNear);
   1423 
   1424   // 4a. Apply the receiver to the given argArray (passing undefined for
   1425   // new.target).
   1426   __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
   1427   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   1428 
   1429   // 4b. The argArray is either null or undefined, so we tail call without any
   1430   // arguments to the receiver.
   1431   __ bind(&no_arguments);
   1432   {
   1433     __ Set(eax, 0);
   1434     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1435   }
   1436 
   1437   // 4c. The receiver is not callable, throw an appropriate TypeError.
   1438   __ bind(&receiver_not_callable);
   1439   {
   1440     __ mov(Operand(esp, kPointerSize), edi);
   1441     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
   1442   }
   1443 }
   1444 
   1445 // static
   1446 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
   1447   // Stack Layout:
   1448   // esp[0]           : Return address
   1449   // esp[8]           : Argument n
   1450   // esp[16]          : Argument n-1
   1451   //  ...
   1452   // esp[8 * n]       : Argument 1
   1453   // esp[8 * (n + 1)] : Receiver (callable to call)
   1454   //
   1455   // eax contains the number of arguments, n, not counting the receiver.
   1456   //
   1457   // 1. Make sure we have at least one argument.
   1458   {
   1459     Label done;
   1460     __ test(eax, eax);
   1461     __ j(not_zero, &done, Label::kNear);
   1462     __ PopReturnAddressTo(ebx);
   1463     __ PushRoot(Heap::kUndefinedValueRootIndex);
   1464     __ PushReturnAddressFrom(ebx);
   1465     __ inc(eax);
   1466     __ bind(&done);
   1467   }
   1468 
   1469   // 2. Get the callable to call (passed as receiver) from the stack.
   1470   __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
   1471 
   1472   // 3. Shift arguments and return address one slot down on the stack
   1473   //    (overwriting the original receiver).  Adjust argument count to make
   1474   //    the original first argument the new receiver.
   1475   {
   1476     Label loop;
   1477     __ mov(ecx, eax);
   1478     __ bind(&loop);
   1479     __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
   1480     __ mov(Operand(esp, ecx, times_pointer_size, kPointerSize), ebx);
   1481     __ dec(ecx);
   1482     __ j(not_sign, &loop);  // While non-negative (to copy return address).
   1483     __ pop(ebx);            // Discard copy of return address.
   1484     __ dec(eax);  // One fewer argument (first argument is new receiver).
   1485   }
   1486 
   1487   // 4. Call the callable.
   1488   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1489 }
   1490 
// Generates code for Reflect.apply(target, thisArgument, argumentsList).
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : argumentsList
  //  -- esp[8]  : thisArgument
  //  -- esp[12] : target
  //  -- esp[16] : receiver
  // -----------------------------------

  // 1. Load target into edi (if present), argumentsList into eax (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label done;
    // Default target (edi), thisArgument (edx) and argumentsList (ebx) to
    // undefined.
    __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
    __ mov(edx, edi);
    __ mov(ebx, edi);
    __ cmp(eax, Immediate(1));
    __ j(below, &done, Label::kNear);
    // argc >= 1: load target (first argument).
    __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
    __ j(equal, &done, Label::kNear);
    // argc >= 2: load thisArgument (second argument).
    __ mov(edx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
    __ cmp(eax, Immediate(3));
    __ j(below, &done, Label::kNear);
    // argc >= 3: load argumentsList (third argument).
    __ mov(ebx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
    __ bind(&done);
    // Drop all arguments plus the receiver and push thisArgument instead.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ Push(edx);
    __ PushReturnAddressFrom(ecx);
    __ Move(eax, ebx);
  }

  // ----------- S t a t e -------------
  //  -- eax    : argumentsList
  //  -- edi    : target
  //  -- esp[0] : return address
  //  -- esp[4] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(edi, &target_not_callable, Label::kNear);
  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsCallable));
  __ j(zero, &target_not_callable, Label::kNear);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    // Store the target in the thisArgument slot for the error message.
    __ mov(Operand(esp, kPointerSize), edi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
   1552 
// Generates code for Reflect.construct(target, argumentsList[, newTarget]).
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : new.target (optional)
  //  -- esp[8]  : argumentsList
  //  -- esp[12] : target
  //  -- esp[16] : receiver
  // -----------------------------------

  // 1. Load target into edi (if present), argumentsList into eax (if present),
  // new.target into edx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label done;
    // Default target (edi), new.target (edx) and argumentsList (ebx) to
    // undefined.
    __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
    __ mov(edx, edi);
    __ mov(ebx, edi);
    __ cmp(eax, Immediate(1));
    __ j(below, &done, Label::kNear);
    // argc >= 1: load target (first argument); new.target defaults to target.
    __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
    __ mov(edx, edi);
    __ j(equal, &done, Label::kNear);
    // argc >= 2: load argumentsList (second argument).
    __ mov(ebx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
    __ cmp(eax, Immediate(3));
    __ j(below, &done, Label::kNear);
    // argc >= 3: load new.target (third argument).
    __ mov(edx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
    __ bind(&done);
    // Drop all arguments plus the receiver and push undefined as receiver.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(ecx);
    __ Move(eax, ebx);
  }

  // ----------- S t a t e -------------
  //  -- eax    : argumentsList
  //  -- edx    : new.target
  //  -- edi    : target
  //  -- esp[0] : return address
  //  -- esp[4] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(edi, &target_not_constructor, Label::kNear);
  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsConstructor));
  __ j(zero, &target_not_constructor, Label::kNear);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsConstructor));
  __ j(zero, &new_target_not_constructor, Label::kNear);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    // Store the offending object in the receiver slot for the error message.
    __ mov(Operand(esp, kPointerSize), edi);
    __ TailCallRuntime(Runtime::kThrowNotConstructor);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ mov(Operand(esp, kPointerSize), edx);
    __ TailCallRuntime(Runtime::kThrowNotConstructor);
  }
}
   1630 
// Generates the entry point for the InternalArray function when it is called
// as a normal function; dispatches to the InternalArrayConstructorStub.
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, edi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray function should be a map.
    __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // A non-smi check catches both cases, since the smi tag is zero:
    // Will both indicate a NULL and a Smi.
    __ test(ebx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(ebx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
   1658 
// Generates the entry point for the Array function when it is called as a
// normal function; dispatches to the ArrayConstructorStub.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, edi);
  // The stub expects new.target in edx; for a normal call it equals the
  // function itself.
  __ mov(edx, edi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array function should be a map.
    __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // A non-smi check catches both cases, since the smi tag is zero:
    // Will both indicate a NULL and a Smi.
    __ test(ebx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ebx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  // Pass undefined as the AllocationSite.
  __ mov(ebx, masm->isolate()->factory()->undefined_value());
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
   1687 
   1688 // static
   1689 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
   1690   // ----------- S t a t e -------------
   1691   //  -- eax                 : number of arguments
   1692   //  -- edi                 : function
   1693   //  -- esi                 : context
   1694   //  -- esp[0]              : return address
   1695   //  -- esp[(argc - n) * 8] : arg[n] (zero-based)
   1696   //  -- esp[(argc + 1) * 8] : receiver
   1697   // -----------------------------------
   1698   Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above;
   1699   Heap::RootListIndex const root_index =
   1700       (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
   1701                                      : Heap::kMinusInfinityValueRootIndex;
   1702   const int reg_sel = (kind == MathMaxMinKind::kMin) ? 1 : 0;
   1703 
   1704   // Load the accumulator with the default return value (either -Infinity or
   1705   // +Infinity), with the tagged value in edx and the double value in stx_0.
   1706   __ LoadRoot(edx, root_index);
   1707   __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
   1708   __ Move(ecx, eax);
   1709 
   1710   Label done_loop, loop;
   1711   __ bind(&loop);
   1712   {
   1713     // Check if all parameters done.
   1714     __ test(ecx, ecx);
   1715     __ j(zero, &done_loop);
   1716 
   1717     // Load the next parameter tagged value into ebx.
   1718     __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
   1719 
   1720     // Load the double value of the parameter into stx_1, maybe converting the
   1721     // parameter to a number first using the ToNumber builtin if necessary.
   1722     Label convert, convert_smi, convert_number, done_convert;
   1723     __ bind(&convert);
   1724     __ JumpIfSmi(ebx, &convert_smi);
   1725     __ JumpIfRoot(FieldOperand(ebx, HeapObject::kMapOffset),
   1726                   Heap::kHeapNumberMapRootIndex, &convert_number);
   1727     {
   1728       // Parameter is not a Number, use the ToNumber builtin to convert it.
   1729       FrameScope scope(masm, StackFrame::MANUAL);
   1730       __ SmiTag(eax);
   1731       __ SmiTag(ecx);
   1732       __ EnterBuiltinFrame(esi, edi, eax);
   1733       __ Push(ecx);
   1734       __ Push(edx);
   1735       __ mov(eax, ebx);
   1736       __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
   1737       __ mov(ebx, eax);
   1738       __ Pop(edx);
   1739       __ Pop(ecx);
   1740       __ LeaveBuiltinFrame(esi, edi, eax);
   1741       __ SmiUntag(ecx);
   1742       __ SmiUntag(eax);
   1743       {
   1744         // Restore the double accumulator value (stX_0).
   1745         Label restore_smi, done_restore;
   1746         __ JumpIfSmi(edx, &restore_smi, Label::kNear);
   1747         __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
   1748         __ jmp(&done_restore, Label::kNear);
   1749         __ bind(&restore_smi);
   1750         __ SmiUntag(edx);
   1751         __ push(edx);
   1752         __ fild_s(Operand(esp, 0));
   1753         __ pop(edx);
   1754         __ SmiTag(edx);
   1755         __ bind(&done_restore);
   1756       }
   1757     }
   1758     __ jmp(&convert);
   1759     __ bind(&convert_number);
   1760     // Load another value into stx_1
   1761     __ fld_d(FieldOperand(ebx, HeapNumber::kValueOffset));
   1762     __ fxch();
   1763     __ jmp(&done_convert, Label::kNear);
   1764     __ bind(&convert_smi);
   1765     __ SmiUntag(ebx);
   1766     __ push(ebx);
   1767     __ fild_s(Operand(esp, 0));
   1768     __ pop(ebx);
   1769     __ fxch();
   1770     __ SmiTag(ebx);
   1771     __ bind(&done_convert);
   1772 
   1773     // Perform the actual comparison with the accumulator value on the left hand
   1774     // side (stx_0) and the next parameter value on the right hand side (stx_1).
   1775     Label compare_equal, compare_nan, compare_swap, done_compare;
   1776 
   1777     // Duplicates the 2 float data for FCmp
   1778     __ fld(1);
   1779     __ fld(1);
   1780     __ FCmp();
   1781     __ j(parity_even, &compare_nan, Label::kNear);
   1782     __ j(cc, &done_compare, Label::kNear);
   1783     __ j(equal, &compare_equal, Label::kNear);
   1784 
   1785     // Result is on the right hand side(stx_0).
   1786     __ bind(&compare_swap);
   1787     __ fxch();
   1788     __ mov(edx, ebx);
   1789     __ jmp(&done_compare, Label::kNear);
   1790 
   1791     // At least one side is NaN, which means that the result will be NaN too.
   1792     __ bind(&compare_nan);
   1793     // Set the result on the right hand side (stx_0) to nan
   1794     __ fstp(0);
   1795     __ LoadRoot(edx, Heap::kNanValueRootIndex);
   1796     __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
   1797     __ jmp(&done_compare, Label::kNear);
   1798 
   1799     // Left and right hand side are equal, check for -0 vs. +0.
   1800     __ bind(&compare_equal);
   1801     // Check the sign of the value in reg_sel
   1802     __ fld(reg_sel);
   1803     __ FXamSign();
   1804     __ j(not_zero, &compare_swap);
   1805 
   1806     __ bind(&done_compare);
   1807     // The right result is on the right hand side(stx_0)
   1808     // and can remove the useless stx_1 now.
   1809     __ fxch();
   1810     __ fstp(0);
   1811     __ dec(ecx);
   1812     __ jmp(&loop);
   1813   }
   1814 
   1815   __ bind(&done_loop);
   1816   __ PopReturnAddressTo(ecx);
   1817   __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
   1818   __ PushReturnAddressFrom(ecx);
   1819   __ mov(eax, edx);
   1820   __ Ret();
   1821 }
   1822 
   1823 // static
   1824 void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
   1825   // ----------- S t a t e -------------
   1826   //  -- eax                 : number of arguments
   1827   //  -- edi                 : constructor function
   1828   //  -- esi                 : context
   1829   //  -- esp[0]              : return address
   1830   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   1831   //  -- esp[(argc + 1) * 4] : receiver
   1832   // -----------------------------------
   1833 
   1834   // 1. Load the first argument into ebx.
   1835   Label no_arguments;
   1836   {
   1837     __ test(eax, eax);
   1838     __ j(zero, &no_arguments, Label::kNear);
   1839     __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
   1840   }
   1841 
   1842   // 2a. Convert the first argument to a number.
   1843   {
   1844     FrameScope scope(masm, StackFrame::MANUAL);
   1845     __ SmiTag(eax);
   1846     __ EnterBuiltinFrame(esi, edi, eax);
   1847     __ mov(eax, ebx);
   1848     __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
   1849     __ LeaveBuiltinFrame(esi, edi, ebx);  // Argc popped to ebx.
   1850     __ SmiUntag(ebx);
   1851   }
   1852 
   1853   {
   1854     // Drop all arguments including the receiver.
   1855     __ PopReturnAddressTo(ecx);
   1856     __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
   1857     __ PushReturnAddressFrom(ecx);
   1858     __ Ret();
   1859   }
   1860 
   1861   // 2b. No arguments, return +0 (already in eax).
   1862   __ bind(&no_arguments);
   1863   __ ret(1 * kPointerSize);
   1864 }
   1865 
   1866 // static
   1867 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
   1868   // ----------- S t a t e -------------
   1869   //  -- eax                 : number of arguments
   1870   //  -- edi                 : constructor function
   1871   //  -- edx                 : new target
   1872   //  -- esi                 : context
   1873   //  -- esp[0]              : return address
   1874   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   1875   //  -- esp[(argc + 1) * 4] : receiver
   1876   // -----------------------------------
   1877 
   1878   // 1. Make sure we operate in the context of the called function.
   1879   __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
   1880 
   1881   // Store argc in r8.
   1882   __ mov(ecx, eax);
   1883   __ SmiTag(ecx);
   1884 
   1885   // 2. Load the first argument into ebx.
   1886   {
   1887     Label no_arguments, done;
   1888     __ test(eax, eax);
   1889     __ j(zero, &no_arguments, Label::kNear);
   1890     __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
   1891     __ jmp(&done, Label::kNear);
   1892     __ bind(&no_arguments);
   1893     __ Move(ebx, Smi::kZero);
   1894     __ bind(&done);
   1895   }
   1896 
   1897   // 3. Make sure ebx is a number.
   1898   {
   1899     Label done_convert;
   1900     __ JumpIfSmi(ebx, &done_convert);
   1901     __ CompareRoot(FieldOperand(ebx, HeapObject::kMapOffset),
   1902                    Heap::kHeapNumberMapRootIndex);
   1903     __ j(equal, &done_convert);
   1904     {
   1905       FrameScope scope(masm, StackFrame::MANUAL);
   1906       __ EnterBuiltinFrame(esi, edi, ecx);
   1907       __ Push(edx);
   1908       __ Move(eax, ebx);
   1909       __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
   1910       __ Move(ebx, eax);
   1911       __ Pop(edx);
   1912       __ LeaveBuiltinFrame(esi, edi, ecx);
   1913     }
   1914     __ bind(&done_convert);
   1915   }
   1916 
   1917   // 4. Check if new target and constructor differ.
   1918   Label drop_frame_and_ret, done_alloc, new_object;
   1919   __ cmp(edx, edi);
   1920   __ j(not_equal, &new_object);
   1921 
   1922   // 5. Allocate a JSValue wrapper for the number.
   1923   __ AllocateJSValue(eax, edi, ebx, esi, &done_alloc);
   1924   __ jmp(&drop_frame_and_ret);
   1925 
   1926   __ bind(&done_alloc);
   1927   __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));  // Restore esi.
   1928 
   1929   // 6. Fallback to the runtime to create new object.
   1930   __ bind(&new_object);
   1931   {
   1932     FrameScope scope(masm, StackFrame::MANUAL);
   1933     __ EnterBuiltinFrame(esi, edi, ecx);
   1934     __ Push(ebx);  // the first argument
   1935     __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
   1936             RelocInfo::CODE_TARGET);
   1937     __ Pop(FieldOperand(eax, JSValue::kValueOffset));
   1938     __ LeaveBuiltinFrame(esi, edi, ecx);
   1939   }
   1940 
   1941   __ bind(&drop_frame_and_ret);
   1942   {
   1943     // Drop all arguments including the receiver.
   1944     __ PopReturnAddressTo(esi);
   1945     __ SmiUntag(ecx);
   1946     __ lea(esp, Operand(esp, ecx, times_pointer_size, kPointerSize));
   1947     __ PushReturnAddressFrom(esi);
   1948     __ Ret();
   1949   }
   1950 }
   1951 
   1952 // static
   1953 void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
   1954   // ----------- S t a t e -------------
   1955   //  -- eax                 : number of arguments
   1956   //  -- edi                 : constructor function
   1957   //  -- esi                 : context
   1958   //  -- esp[0]              : return address
   1959   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   1960   //  -- esp[(argc + 1) * 4] : receiver
   1961   // -----------------------------------
   1962 
   1963   // 1. Load the first argument into eax.
   1964   Label no_arguments;
   1965   {
   1966     __ mov(ebx, eax);  // Store argc in ebx.
   1967     __ test(eax, eax);
   1968     __ j(zero, &no_arguments, Label::kNear);
   1969     __ mov(eax, Operand(esp, eax, times_pointer_size, 0));
   1970   }
   1971 
   1972   // 2a. At least one argument, return eax if it's a string, otherwise
   1973   // dispatch to appropriate conversion.
   1974   Label drop_frame_and_ret, to_string, symbol_descriptive_string;
   1975   {
   1976     __ JumpIfSmi(eax, &to_string, Label::kNear);
   1977     STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
   1978     __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
   1979     __ j(above, &to_string, Label::kNear);
   1980     __ j(equal, &symbol_descriptive_string, Label::kNear);
   1981     __ jmp(&drop_frame_and_ret, Label::kNear);
   1982   }
   1983 
   1984   // 2b. No arguments, return the empty string (and pop the receiver).
   1985   __ bind(&no_arguments);
   1986   {
   1987     __ LoadRoot(eax, Heap::kempty_stringRootIndex);
   1988     __ ret(1 * kPointerSize);
   1989   }
   1990 
   1991   // 3a. Convert eax to a string.
   1992   __ bind(&to_string);
   1993   {
   1994     FrameScope scope(masm, StackFrame::MANUAL);
   1995     __ SmiTag(ebx);
   1996     __ EnterBuiltinFrame(esi, edi, ebx);
   1997     __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
   1998     __ LeaveBuiltinFrame(esi, edi, ebx);
   1999     __ SmiUntag(ebx);
   2000   }
   2001   __ jmp(&drop_frame_and_ret, Label::kNear);
   2002 
   2003   // 3b. Convert symbol in eax to a string.
   2004   __ bind(&symbol_descriptive_string);
   2005   {
   2006     __ PopReturnAddressTo(ecx);
   2007     __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
   2008     __ Push(eax);
   2009     __ PushReturnAddressFrom(ecx);
   2010     __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
   2011   }
   2012 
   2013   __ bind(&drop_frame_and_ret);
   2014   {
   2015     // Drop all arguments including the receiver.
   2016     __ PopReturnAddressTo(ecx);
   2017     __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
   2018     __ PushReturnAddressFrom(ecx);
   2019     __ Ret();
   2020   }
   2021 }
   2022 
   2023 // static
   2024 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
   2025   // ----------- S t a t e -------------
   2026   //  -- eax                 : number of arguments
   2027   //  -- edi                 : constructor function
   2028   //  -- edx                 : new target
   2029   //  -- esi                 : context
   2030   //  -- esp[0]              : return address
   2031   //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
   2032   //  -- esp[(argc + 1) * 4] : receiver
   2033   // -----------------------------------
   2034 
   2035   // 1. Make sure we operate in the context of the called function.
   2036   __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
   2037 
   2038   __ mov(ebx, eax);
   2039 
   2040   // 2. Load the first argument into eax.
   2041   {
   2042     Label no_arguments, done;
   2043     __ test(ebx, ebx);
   2044     __ j(zero, &no_arguments, Label::kNear);
   2045     __ mov(eax, Operand(esp, ebx, times_pointer_size, 0));
   2046     __ jmp(&done, Label::kNear);
   2047     __ bind(&no_arguments);
   2048     __ LoadRoot(eax, Heap::kempty_stringRootIndex);
   2049     __ bind(&done);
   2050   }
   2051 
   2052   // 3. Make sure eax is a string.
   2053   {
   2054     Label convert, done_convert;
   2055     __ JumpIfSmi(eax, &convert, Label::kNear);
   2056     __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ecx);
   2057     __ j(below, &done_convert);
   2058     __ bind(&convert);
   2059     {
   2060       FrameScope scope(masm, StackFrame::MANUAL);
   2061       __ SmiTag(ebx);
   2062       __ EnterBuiltinFrame(esi, edi, ebx);
   2063       __ Push(edx);
   2064       __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
   2065       __ Pop(edx);
   2066       __ LeaveBuiltinFrame(esi, edi, ebx);
   2067       __ SmiUntag(ebx);
   2068     }
   2069     __ bind(&done_convert);
   2070   }
   2071 
   2072   // 4. Check if new target and constructor differ.
   2073   Label drop_frame_and_ret, done_alloc, new_object;
   2074   __ cmp(edx, edi);
   2075   __ j(not_equal, &new_object);
   2076 
   2077   // 5. Allocate a JSValue wrapper for the string.
   2078   // AllocateJSValue can't handle src == dst register. Reuse esi and restore it
   2079   // as needed after the call.
   2080   __ mov(esi, eax);
   2081   __ AllocateJSValue(eax, edi, esi, ecx, &done_alloc);
   2082   __ jmp(&drop_frame_and_ret);
   2083 
   2084   __ bind(&done_alloc);
   2085   {
   2086     // Restore eax to the first argument and esi to the context.
   2087     __ mov(eax, esi);
   2088     __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
   2089   }
   2090 
   2091   // 6. Fallback to the runtime to create new object.
   2092   __ bind(&new_object);
   2093   {
   2094     FrameScope scope(masm, StackFrame::MANUAL);
   2095     __ SmiTag(ebx);
   2096     __ EnterBuiltinFrame(esi, edi, ebx);
   2097     __ Push(eax);  // the first argument
   2098     __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
   2099             RelocInfo::CODE_TARGET);
   2100     __ Pop(FieldOperand(eax, JSValue::kValueOffset));
   2101     __ LeaveBuiltinFrame(esi, edi, ebx);
   2102     __ SmiUntag(ebx);
   2103   }
   2104 
   2105   __ bind(&drop_frame_and_ret);
   2106   {
   2107     // Drop all arguments including the receiver.
   2108     __ PopReturnAddressTo(ecx);
   2109     __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
   2110     __ PushReturnAddressFrom(ecx);
   2111     __ Ret();
   2112   }
   2113 }
   2114 
// Builds an arguments-adaptor frame: saves ebp, pushes the frame-type
// sentinel, the function (edi) and the smi-tagged argument count.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(ebp);
  __ mov(ebp, esp);

  // Store the arguments adaptor context sentinel.
  __ push(Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Push the function on the stack.
  __ push(edi);

  // Preserve the number of arguments on the stack. Must preserve eax,
  // ebx and ecx because these registers are used when copying the
  // arguments and the receiver.
  // lea computes eax * 2 + kSmiTag, i.e. the smi-tagged argc, without
  // clobbering eax or the flags.
  STATIC_ASSERT(kSmiTagSize == 1);
  __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
  __ push(edi);
}
   2132 
// Tears down an arguments-adaptor frame and removes the caller's arguments
// (including the receiver) from the stack. Clobbers ebx and ecx.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack (still smi-tagged).
  __ mov(ebx, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ leave();

  // Remove caller arguments from the stack. times_2 scales the smi-tagged
  // count directly into a byte offset (smi tag size is one bit).
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ pop(ecx);
  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
  __ push(ecx);
}
   2146 
// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argumentsList
  //  -- edi    : target
  //  -- edx    : new.target (checked to be constructor or undefined)
  //  -- esp[0] : return address.
  //  -- esp[4] : thisArgument
  // -----------------------------------
  // Flattens argumentsList onto the stack and then tail-dispatches to the
  // Call or Construct builtin depending on whether new.target is undefined.

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_holey_array, create_runtime,
        done_create;
    // Smis have no elements to spread; let the runtime raise the error.
    __ JumpIfSmi(eax, &create_runtime);

    // Load the map of argumentsList into ecx.
    __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));

    // Load native context into ebx.
    __ mov(ebx, NativeContextOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ cmp(ecx, ContextOperand(ebx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ j(equal, &create_arguments);
    __ cmp(ecx, ContextOperand(ebx, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ j(equal, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CmpInstanceType(ecx, JS_ARRAY_TYPE);
    __ j(equal, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      // Preserve target and new.target across the runtime call.
      __ Push(edi);
      __ Push(edx);
      __ Push(eax);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(edx);
      __ Pop(edi);
      // Result (eax) is a FixedArray; ebx <- untagged element count.
      __ mov(ebx, FieldOperand(eax, FixedArray::kLengthOffset));
      __ SmiUntag(ebx);
    }
    __ jmp(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ mov(ebx, FieldOperand(eax, JSArgumentsObject::kLengthOffset));
    __ mov(ecx, FieldOperand(eax, JSObject::kElementsOffset));
    // If the length property no longer matches the backing store length
    // (arguments object was modified), fall back to the runtime.
    __ cmp(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
    __ j(not_equal, &create_runtime);
    __ SmiUntag(ebx);
    __ mov(eax, ecx);
    __ jmp(&done_create);

    // For holey JSArrays we need to check that the array prototype chain
    // protector is intact and our prototype is the Array.prototype actually.
    __ bind(&create_holey_array);
    __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
    __ mov(ecx, FieldOperand(ecx, Map::kPrototypeOffset));
    __ cmp(ecx, ContextOperand(ebx, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
    __ j(not_equal, &create_runtime);
    __ LoadRoot(ecx, Heap::kArrayProtectorRootIndex);
    __ cmp(FieldOperand(ecx, PropertyCell::kValueOffset),
           Immediate(Smi::FromInt(Isolate::kProtectorValid)));
    __ j(not_equal, &create_runtime);
    __ mov(ebx, FieldOperand(eax, JSArray::kLengthOffset));
    __ SmiUntag(ebx);
    __ mov(eax, FieldOperand(eax, JSArray::kElementsOffset));
    __ jmp(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(ecx);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
    __ cmp(ecx, Immediate(FAST_HOLEY_SMI_ELEMENTS));
    __ j(equal, &create_holey_array, Label::kNear);
    __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
    __ j(equal, &create_holey_array, Label::kNear);
    // Kinds above FAST_HOLEY_ELEMENTS (dictionary mode etc.) go to the
    // runtime; the remaining packed kinds fall through to the fast path.
    __ j(above, &create_runtime);
    __ mov(ebx, FieldOperand(eax, JSArray::kLengthOffset));
    __ SmiUntag(ebx);
    __ mov(eax, FieldOperand(eax, JSArray::kElementsOffset));

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    ExternalReference real_stack_limit =
        ExternalReference::address_of_real_stack_limit(masm->isolate());
    __ mov(ecx, Operand::StaticVariable(real_stack_limit));
    // Make ecx the space we have left. The stack might already be overflowed
    // here which will cause ecx to become negative.
    __ neg(ecx);
    __ add(ecx, esp);
    __ sar(ecx, kPointerSizeLog2);
    // Check if the arguments will overflow the stack.
    __ cmp(ecx, ebx);
    __ j(greater, &done, Label::kNear);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- edi    : target
  //  -- eax    : args (a FixedArray built from argumentsList)
  //  -- ebx    : len (number of elements to push from args)
  //  -- edx    : new.target (checked to be constructor or undefined)
  //  -- esp[0] : return address.
  //  -- esp[4] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    // Save edx/edi to stX0/stX1.
    // No GP register is free here, so new.target and target are parked on
    // the x87 stack by reinterpreting their 32-bit patterns as floats.
    // NOTE(review): fld_s quietizes signaling-NaN bit patterns when x87
    // exceptions are masked; assumed unreachable for the pointer values
    // saved here — confirm against the x87 port's register-spill convention.
    __ push(edx);
    __ push(edi);
    __ fld_s(MemOperand(esp, 0));
    __ fld_s(MemOperand(esp, 4));
    __ lea(esp, Operand(esp, 2 * kFloatSize));

    __ PopReturnAddressTo(edx);
    __ Move(ecx, Immediate(0));
    Label done, push, loop;
    __ bind(&loop);
    __ cmp(ecx, ebx);
    __ j(equal, &done, Label::kNear);
    // Turn the hole into undefined as we go.
    __ mov(edi,
           FieldOperand(eax, ecx, times_pointer_size, FixedArray::kHeaderSize));
    __ CompareRoot(edi, Heap::kTheHoleValueRootIndex);
    __ j(not_equal, &push, Label::kNear);
    __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
    __ bind(&push);
    __ Push(edi);
    __ inc(ecx);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(edx);

    // Restore edx/edi from stX0/stX1.
    __ lea(esp, Operand(esp, -2 * kFloatSize));
    __ fstp_s(MemOperand(esp, 0));
    __ fstp_s(MemOperand(esp, 4));
    __ pop(edx);
    __ pop(edi);

    // eax <- number of pushed arguments, as expected by Call/Construct.
    __ Move(eax, ebx);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(edx, Heap::kUndefinedValueRootIndex);
    __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
   2314 
namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg
// |  f()'s caller pc      <- sp
// ----------------------
//
// args_reg holds the (untagged) number of arguments prepared for the callee
// and must not alias any of the three scratch registers.
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  // The flag lives in a byte-sized isolate global, read at runtime so that
  // the same generated code works whether or not the debugger disabled it.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ movzx_b(scratch1,
             Operand::StaticVariable(is_tail_call_elimination_enabled));
  __ cmp(scratch1, Immediate(0));
  __ j(equal, &done, Label::kNear);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
           Immediate(Smi::FromInt(StackFrame::STUB)));
    __ j(not_equal, &no_interpreter_frame, Label::kNear);
    // Skip the stub frame: point ebp at the frame below it.
    __ mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ mov(scratch2, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &no_arguments_adaptor, Label::kNear);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mov(ebp, scratch2);
  __ mov(caller_args_count_reg,
         Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ jmp(&formal_parameter_count_loaded, Label::kNear);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count
  __ mov(scratch1, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(scratch1,
         FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ mov(
      caller_args_count_reg,
      FieldOperand(scratch1, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(caller_args_count_reg);

  __ bind(&formal_parameter_count_loaded);

  // Let the macro-assembler shuffle the prepared arguments over the dropped
  // frame(s); the return address stays on the stack throughout.
  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3, ReturnAddressState::kOnStack, 0);
  __ bind(&done);
}
}  // namespace
   2404 
// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the function to call (checked to be a JSFunction)
  // -----------------------------------
  // Implements [[Call]] for ordinary JSFunctions: rejects class
  // constructors, converts the receiver for sloppy-mode functions as
  // required by `mode`, optionally prepares a tail call, then invokes the
  // function through InvokeFunctionCode.
  __ AssertFunction(edi);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ test_b(FieldOperand(edx, SharedFunctionInfo::kFunctionKindByteOffset),
            Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ j(not_zero, &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  // Both flags live in the same byte (asserted above), so one test_b
  // checks native and strict-mode bits together.
  __ test_b(FieldOperand(edx, SharedFunctionInfo::kNativeByteOffset),
            Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                      (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- eax : the number of arguments (not including the receiver)
    //  -- edx : the shared function info.
    //  -- edi : the function to call (checked to be a JSFunction)
    //  -- esi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(ecx);
    } else {
      Label convert_to_object, convert_receiver;
      // Load the receiver, which sits just above the arguments.
      __ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
      __ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ebx);
      // JSReceivers need no conversion.
      __ j(above_equal, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(ecx, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy, Label::kNear);
        __ JumpIfNotRoot(ecx, Heap::kNullValueRootIndex, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(ecx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        // Preserve the argument count (as a smi) and the callee; the
        // receiver travels to ToObject in eax.
        __ SmiTag(eax);
        __ Push(eax);
        __ Push(edi);
        __ mov(eax, ecx);
        __ Push(esi);
        __ Call(masm->isolate()->builtins()->ToObject(),
                RelocInfo::CODE_TARGET);
        __ Pop(esi);
        __ mov(ecx, eax);
        __ Pop(edi);
        __ Pop(eax);
        __ SmiUntag(eax);
      }
      // Reload the shared function info, clobbered above.
      __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    // Store the converted receiver back into its stack slot.
    __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ecx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the shared function info.
  //  -- edi : the function to call (checked to be a JSFunction)
  //  -- esi : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, eax, ebx, ecx, edx);
    // Reload shared function info.
    __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  }

  __ mov(ebx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(ebx);
  ParameterCount actual(eax);
  ParameterCount expected(ebx);
  // Tail-jumps into the function; code below is only reached via the
  // class_constructor branch above.
  __ InvokeFunctionCode(edi, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());
  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ push(edi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
   2520 
namespace {

// Pushes the [[BoundArguments]] of the JSBoundFunction in edi onto the
// stack, below the already-pushed call arguments, and bumps eax by the
// number of bound arguments. No-op when there are no bound arguments.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : new.target (only in case of [[Construct]])
  //  -- edi : target (checked to be a JSBoundFunction)
  // -----------------------------------

  // Load [[BoundArguments]] into ecx and length of that into ebx.
  Label no_bound_arguments;
  __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
  __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
  __ SmiUntag(ebx);
  __ test(ebx, ebx);
  __ j(zero, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- eax : the number of arguments (not including the receiver)
    //  -- edx : new.target (only in case of [[Construct]])
    //  -- edi : target (checked to be a JSBoundFunction)
    //  -- ecx : the [[BoundArguments]] (implemented as FixedArray)
    //  -- ebx : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ lea(ecx, Operand(ebx, times_pointer_size, 0));
      __ sub(esp, ecx);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(esp, ecx, Heap::kRealStackLimitRootIndex);
      __ j(greater, &done, Label::kNear);  // Signed comparison.
      // Restore the stack pointer.
      __ lea(esp, Operand(esp, ebx, times_pointer_size, 0));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Adjust effective number of arguments to include return address.
    __ inc(eax);

    // Relocate arguments and return address down the stack.
    {
      Label loop;
      __ Set(ecx, 0);
      __ lea(ebx, Operand(esp, ebx, times_pointer_size, 0));
      __ bind(&loop);
      // fld_s/fstp_s move one pointer-sized word via the x87 stack, since
      // no GP register is free. NOTE(review): fld_s quietizes signaling-NaN
      // bit patterns; assumed not to occur for the tagged words copied
      // here — confirm against the x87 port's conventions.
      __ fld_s(Operand(ebx, ecx, times_pointer_size, 0));
      __ fstp_s(Operand(esp, ecx, times_pointer_size, 0));
      __ inc(ecx);
      __ cmp(ecx, eax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
      __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
      __ SmiUntag(ebx);
      __ bind(&loop);
      // dec sets the flags tested by j(greater) below; the intervening
      // fld_s/fstp_s/lea do not modify flags (lea is used instead of inc
      // for exactly that reason).
      __ dec(ebx);
      __ fld_s(
          FieldOperand(ecx, ebx, times_pointer_size, FixedArray::kHeaderSize));
      __ fstp_s(Operand(esp, eax, times_pointer_size, 0));
      __ lea(eax, Operand(eax, 1));
      __ j(greater, &loop);
    }

    // Adjust effective number of arguments (eax contains the number of
    // arguments from the call plus return address plus the number of
    // [[BoundArguments]]), so we need to subtract one for the return address.
    __ dec(eax);
  }
  __ bind(&no_bound_arguments);
}

}  // namespace
   2606 
// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  // Implements [[Call]] for bound functions: installs [[BoundThis]] as the
  // receiver, pushes [[BoundArguments]], then tail-jumps to the generic
  // Call builtin with the [[BoundTargetFunction]] as the new target.
  __ AssertBoundFunction(edi);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, eax, ebx, ecx, edx);
  }

  // Patch the receiver to [[BoundThis]].
  __ mov(ebx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ebx);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
  // Load the builtin's Code object from its isolate slot and jump past the
  // Code header to the first instruction.
  __ mov(ecx, Operand::StaticVariable(ExternalReference(
                  Builtins::kCall_ReceiverIsAny, masm->isolate())));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);
}
   2634 
// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the target to call (can be any Object).
  // -----------------------------------
  // Generic [[Call]] dispatcher: routes to CallFunction for JSFunctions,
  // CallBoundFunction for bound functions, the runtime for proxies, the
  // call-as-function delegate for other callables, and throws otherwise.

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(edi, &non_callable);
  __ bind(&non_smi);
  // CmpObjectType leaves the map in ecx, which the CmpInstanceType and
  // bit-field checks below reuse.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
       RelocInfo::CODE_TARGET);
  __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Call]] internal method.
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsCallable));
  __ j(zero, &non_callable);

  __ CmpInstanceType(ecx, JS_PROXY_TYPE);
  __ j(not_equal, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, eax, ebx, ecx, edx);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ PopReturnAddressTo(ecx);
  __ Push(edi);
  __ PushReturnAddressFrom(ecx);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ add(eax, Immediate(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, edi);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(edi);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
   2696 
// Expands the spread argument (on top of the JS arguments, below the return
// address) into individual stack arguments and updates eax accordingly.
// Fast path: an unmodified fast-elements JSArray with all relevant
// protectors intact is iterated directly from its backing store; everything
// else goes through Runtime::kSpreadIterableFixed. edx/edi (and later esi)
// are preserved across the body by parking them on the x87 stack, since no
// GP registers are free.
static void CheckSpreadAndPushToStack(MacroAssembler* masm) {
  // Free up some registers.
  // Save edx/edi to stX0/stX1.
  // NOTE(review): fld_s quietizes signaling-NaN bit patterns when x87
  // exceptions are masked; assumed not to occur for the values spilled
  // here — confirm against the x87 port's conventions.
  __ push(edx);
  __ push(edi);
  __ fld_s(MemOperand(esp, 0));
  __ fld_s(MemOperand(esp, 4));
  __ lea(esp, Operand(esp, 2 * kFloatSize));

  Register argc = eax;

  Register scratch = ecx;
  Register scratch2 = edi;

  Register spread = ebx;
  Register spread_map = edx;

  // Note: spread_len aliases spread_map (edx); the map is dead once the
  // length is computed.
  Register spread_len = edx;

  Label runtime_call, push_args;
  __ mov(spread, Operand(esp, kPointerSize));
  __ JumpIfSmi(spread, &runtime_call);
  __ mov(spread_map, FieldOperand(spread, HeapObject::kMapOffset));

  // Check that the spread is an array.
  __ CmpInstanceType(spread_map, JS_ARRAY_TYPE);
  __ j(not_equal, &runtime_call);

  // Check that we have the original ArrayPrototype.
  __ mov(scratch, FieldOperand(spread_map, Map::kPrototypeOffset));
  __ mov(scratch2, NativeContextOperand());
  __ cmp(scratch,
         ContextOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
  __ j(not_equal, &runtime_call);

  // Check that the ArrayPrototype hasn't been modified in a way that would
  // affect iteration.
  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
  __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset),
         Immediate(Smi::FromInt(Isolate::kProtectorValid)));
  __ j(not_equal, &runtime_call);

  // Check that the map of the initial array iterator hasn't changed.
  __ mov(scratch2, NativeContextOperand());
  __ mov(scratch,
         ContextOperand(scratch2,
                        Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
  __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
  __ cmp(scratch,
         ContextOperand(scratch2,
                        Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
  __ j(not_equal, &runtime_call);

  // For FastPacked kinds, iteration will have the same effect as simply
  // accessing each property in order.
  Label no_protector_check;
  __ mov(scratch, FieldOperand(spread_map, Map::kBitField2Offset));
  __ DecodeField<Map::ElementsKindBits>(scratch);
  // Kinds above FAST_HOLEY_ELEMENTS are not fast; use the runtime.
  __ cmp(scratch, Immediate(FAST_HOLEY_ELEMENTS));
  __ j(above, &runtime_call);
  // For non-FastHoley kinds, we can skip the protector check.
  __ cmp(scratch, Immediate(FAST_SMI_ELEMENTS));
  __ j(equal, &no_protector_check);
  __ cmp(scratch, Immediate(FAST_ELEMENTS));
  __ j(equal, &no_protector_check);
  // Check the ArrayProtector cell.
  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
  __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset),
         Immediate(Smi::FromInt(Isolate::kProtectorValid)));
  __ j(not_equal, &runtime_call);

  __ bind(&no_protector_check);
  // Load the FixedArray backing store, but use the length from the array.
  __ mov(spread_len, FieldOperand(spread, JSArray::kLengthOffset));
  __ SmiUntag(spread_len);
  __ mov(spread, FieldOperand(spread, JSArray::kElementsOffset));
  __ jmp(&push_args);

  __ bind(&runtime_call);
  {
    // Call the builtin for the result of the spread.
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Need to save these on the stack.
    // Restore edx/edi from stX0/stX1.
    __ lea(esp, Operand(esp, -2 * kFloatSize));
    __ fstp_s(MemOperand(esp, 0));
    __ fstp_s(MemOperand(esp, 4));
    __ pop(edx);
    __ pop(edi);

    __ Push(edi);
    __ Push(edx);
    __ SmiTag(argc);
    __ Push(argc);
    __ Push(spread);
    __ CallRuntime(Runtime::kSpreadIterableFixed);
    // The runtime returns the spread as a FixedArray in eax.
    __ mov(spread, eax);
    __ Pop(argc);
    __ SmiUntag(argc);
    __ Pop(edx);
    __ Pop(edi);
    // Free up some registers.
    // Save edx/edi to stX0/stX1.
    __ push(edx);
    __ push(edi);
    __ fld_s(MemOperand(esp, 0));
    __ fld_s(MemOperand(esp, 4));
    __ lea(esp, Operand(esp, 2 * kFloatSize));
  }

  {
    // Calculate the new nargs including the result of the spread.
    __ mov(spread_len, FieldOperand(spread, FixedArray::kLengthOffset));
    __ SmiUntag(spread_len);

    __ bind(&push_args);
    // argc += spread_len - 1. Subtract 1 for the spread itself.
    __ lea(argc, Operand(argc, spread_len, times_1, -1));
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
    // Make scratch the space we have left. The stack might already be
    // overflowed here which will cause scratch to become negative.
    __ neg(scratch);
    __ add(scratch, esp);
    __ sar(scratch, kPointerSizeLog2);
    // Check if the arguments will overflow the stack.
    __ cmp(scratch, spread_len);
    __ j(greater, &done, Label::kNear);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // Put the evaluated spread onto the stack as additional arguments.
  {
    Register return_address = edi;
    // Pop the return address and spread argument.
    __ PopReturnAddressTo(return_address);
    __ Pop(scratch);

    Register scratch2 = esi;
    // Save esi to stX0, edx/edi in stX1/stX2 now.
    __ push(esi);
    __ fld_s(MemOperand(esp, 0));
    __ lea(esp, Operand(esp, 1 * kFloatSize));

    __ mov(scratch, Immediate(0));
    Label done, push, loop;
    __ bind(&loop);
    __ cmp(scratch, spread_len);
    __ j(equal, &done, Label::kNear);
    // Turn holes into undefined while pushing.
    __ mov(scratch2, FieldOperand(spread, scratch, times_pointer_size,
                                  FixedArray::kHeaderSize));
    __ JumpIfNotRoot(scratch2, Heap::kTheHoleValueRootIndex, &push);
    __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex);
    __ bind(&push);
    __ Push(scratch2);
    __ inc(scratch);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(return_address);

    // Now Restore esi from stX0, edx/edi from stX1/stX2.
    __ lea(esp, Operand(esp, -3 * kFloatSize));
    __ fstp_s(MemOperand(esp, 0));
    __ fstp_s(MemOperand(esp, 4));
    __ fstp_s(MemOperand(esp, 8));
    __ pop(esi);
    __ pop(edx);
    __ pop(edi);
  }
}
   2874 
// static
void Builtins::Generate_CallWithSpread(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the target to call (can be any Object)
  // -----------------------------------
  // Expands the spread on the stack, then tail-calls the generic Call
  // builtin with the updated argument count.

  // CheckSpreadAndPushToStack will push edx to save it.
  // Give it a defined value so a garbage edx is never spilled/restored.
  __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
  CheckSpreadAndPushToStack(masm);
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            TailCallMode::kDisallow),
          RelocInfo::CODE_TARGET);
}
   2889 
// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (checked to be a constructor)
  //  -- edi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  // Implements [[Construct]] for ordinary JSFunctions by tail-calling the
  // function's own construct stub.
  __ AssertFunction(edi);

  // Calling convention for function specific ConstructStubs require
  // ebx to contain either an AllocationSite or undefined.
  __ LoadRoot(ebx, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
  // Skip the Code object header to reach the first instruction.
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);
}
   2910 
   2911 // static
   2912 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
   2913   // ----------- S t a t e -------------
   2914   //  -- eax : the number of arguments (not including the receiver)
   2915   //  -- edx : the new target (checked to be a constructor)
   2916   //  -- edi : the constructor to call (checked to be a JSBoundFunction)
   2917   // -----------------------------------
   2918   __ AssertBoundFunction(edi);
   2919 
   2920   // Push the [[BoundArguments]] onto the stack.
   2921   Generate_PushBoundArguments(masm);
   2922 
   2923   // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
   2924   {
   2925     Label done;
   2926     __ cmp(edi, edx);
   2927     __ j(not_equal, &done, Label::kNear);
   2928     __ mov(edx, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
   2929     __ bind(&done);
   2930   }
   2931 
   2932   // Construct the [[BoundTargetFunction]] via the Construct builtin.
   2933   __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
   2934   __ mov(ecx, Operand::StaticVariable(
   2935                   ExternalReference(Builtins::kConstruct, masm->isolate())));
   2936   __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
   2937   __ jmp(ecx);
   2938 }
   2939 
   2940 // static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the constructor to call (checked to be a JSProxy)
  //  -- edx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ PopReturnAddressTo(ecx);
  __ Push(edi);  // proxy (the constructor)
  __ Push(edx);  // new.target
  __ PushReturnAddressFrom(ecx);
  // Include the pushed new_target, constructor and the receiver.
  __ add(eax, Immediate(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
   2960 
   2961 // static
   2962 void Builtins::Generate_Construct(MacroAssembler* masm) {
   2963   // ----------- S t a t e -------------
   2964   //  -- eax : the number of arguments (not including the receiver)
   2965   //  -- edx : the new target (either the same as the constructor or
   2966   //           the JSFunction on which new was invoked initially)
   2967   //  -- edi : the constructor to call (can be any Object)
   2968   // -----------------------------------
   2969 
   2970   // Check if target is a Smi.
   2971   Label non_constructor;
   2972   __ JumpIfSmi(edi, &non_constructor, Label::kNear);
   2973 
   2974   // Dispatch based on instance type.
   2975   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
   2976   __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
   2977        RelocInfo::CODE_TARGET);
   2978 
   2979   // Check if target has a [[Construct]] internal method.
   2980   __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
   2981             Immediate(1 << Map::kIsConstructor));
   2982   __ j(zero, &non_constructor, Label::kNear);
   2983 
   2984   // Only dispatch to bound functions after checking whether they are
   2985   // constructors.
   2986   __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
   2987   __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(),
   2988        RelocInfo::CODE_TARGET);
   2989 
   2990   // Only dispatch to proxies after checking whether they are constructors.
   2991   __ CmpInstanceType(ecx, JS_PROXY_TYPE);
   2992   __ j(equal, masm->isolate()->builtins()->ConstructProxy(),
   2993        RelocInfo::CODE_TARGET);
   2994 
   2995   // Called Construct on an exotic Object with a [[Construct]] internal method.
   2996   {
   2997     // Overwrite the original receiver with the (original) target.
   2998     __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
   2999     // Let the "call_as_constructor_delegate" take care of the rest.
   3000     __ LoadGlobalFunction(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, edi);
   3001     __ Jump(masm->isolate()->builtins()->CallFunction(),
   3002             RelocInfo::CODE_TARGET);
   3003   }
   3004 
   3005   // Called Construct on an Object that doesn't have a [[Construct]] internal
   3006   // method.
   3007   __ bind(&non_constructor);
   3008   __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
   3009           RelocInfo::CODE_TARGET);
   3010 }
   3011 
   3012 // static
void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- edi : the constructor to call (can be any Object)
  // -----------------------------------

  // Expand the spread argument into individual stack slots, then defer to
  // the generic Construct builtin for the actual construction.
  CheckSpreadAndPushToStack(masm);
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
   3024 
   3025 // static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edx    : requested object size (untagged)
  //  -- esp[0] : return address
  // -----------------------------------
  // Smi-tag the requested size and re-push it below the return address so
  // it becomes the runtime call's sole argument.
  __ SmiTag(edx);
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ PushReturnAddressFrom(ecx);
  // No context is available here; clear the context register (esi).
  __ Move(esi, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
   3038 
   3039 // static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edx    : requested object size (untagged)
  //  -- esp[0] : return address
  // -----------------------------------
  // Smi-tag the requested size and push it, together with the encoded
  // target space (OLD_SPACE), as the runtime call's arguments.
  __ SmiTag(edx);
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ PushReturnAddressFrom(ecx);
  // No context is available here; clear the context register (esi).
  __ Move(esi, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
   3053 
   3054 // static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edx    : message_id as Smi
  //  -- esp[0] : return address
  // -----------------------------------
  // Re-push the message id below the return address so it becomes the
  // runtime call's argument, then tail-call Runtime::kAbort.
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ PushReturnAddressFrom(ecx);
  // No context is available here; clear the context register (esi).
  __ Move(esi, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbort);
}
   3066 
// Adapts the actual argument count on the stack to the callee's expected
// count: extra arguments are (logically) kept, missing ones are filled with
// undefined. Builds an arguments-adaptor frame, re-pushes the arguments in
// adapted form, and calls the function's code entry.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : actual number of arguments
  //  -- ebx : expected number of arguments
  //  -- edx : new target (passed through to callee)
  //  -- edi : function (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;
  __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmp(eax, ebx);
  __ j(less, &too_few);
  // Functions flagged with the sentinel take their arguments unadapted.
  __ cmp(ebx, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    // edi is used as a scratch register. It should be restored from the frame
    // when needed.
    Generate_StackOverflowCheck(masm, ebx, ecx, edi, &stack_overflow);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(edi, Operand(ebp, eax, times_4, offset));
    __ mov(eax, -1);  // account for receiver

    // Copy loop: push expected-count arguments (plus receiver) from the
    // caller's frame, walking downward from the receiver slot.
    Label copy;
    __ bind(&copy);
    __ inc(eax);
    __ push(Operand(edi, 0));
    __ sub(edi, Immediate(kPointerSize));
    __ cmp(eax, ebx);
    __ j(less, &copy);
    // eax now contains the expected number of arguments.
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    // edi is used as a scratch register. It should be restored from the frame
    // when needed.
    Generate_StackOverflowCheck(masm, ebx, ecx, edi, &stack_overflow);

    // Remember expected arguments in ecx.
    __ mov(ecx, ebx);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(edi, Operand(ebp, eax, times_4, offset));
    // ebx = expected - actual.
    __ sub(ebx, eax);
    // eax = -actual - 1
    __ neg(eax);
    __ sub(eax, Immediate(1));

    // Copy loop: push all actual arguments plus the receiver; eax counts
    // up from -(actual + 1) to zero.
    Label copy;
    __ bind(&copy);
    __ inc(eax);
    __ push(Operand(edi, 0));
    __ sub(edi, Immediate(kPointerSize));
    __ test(eax, eax);
    __ j(not_zero, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ bind(&fill);
    __ inc(eax);
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
    __ cmp(eax, ebx);
    __ j(less, &fill);

    // Restore expected arguments.
    __ mov(eax, ecx);
  }

  // Call the entry point.
  __ bind(&invoke);
  // Restore function pointer.
  __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  // eax : expected number of arguments
  // edx : new target (passed through to callee)
  // edi : function (passed through to callee)
  __ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
  __ call(ecx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Dont adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
  __ jmp(ecx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // unreachable: ThrowStackOverflow does not return
  }
}
   3177 
   3178 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
   3179                                     Register function_template_info,
   3180                                     Register scratch0, Register scratch1,
   3181                                     Label* receiver_check_failed) {
   3182   // If there is no signature, return the holder.
   3183   __ CompareRoot(FieldOperand(function_template_info,
   3184                               FunctionTemplateInfo::kSignatureOffset),
   3185                  Heap::kUndefinedValueRootIndex);
   3186   Label receiver_check_passed;
   3187   __ j(equal, &receiver_check_passed, Label::kNear);
   3188 
   3189   // Walk the prototype chain.
   3190   __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
   3191   Label prototype_loop_start;
   3192   __ bind(&prototype_loop_start);
   3193 
   3194   // Get the constructor, if any.
   3195   __ GetMapConstructor(scratch0, scratch0, scratch1);
   3196   __ CmpInstanceType(scratch1, JS_FUNCTION_TYPE);
   3197   Label next_prototype;
   3198   __ j(not_equal, &next_prototype, Label::kNear);
   3199 
   3200   // Get the constructor's signature.
   3201   __ mov(scratch0,
   3202          FieldOperand(scratch0, JSFunction::kSharedFunctionInfoOffset));
   3203   __ mov(scratch0,
   3204          FieldOperand(scratch0, SharedFunctionInfo::kFunctionDataOffset));
   3205 
   3206   // Loop through the chain of inheriting function templates.
   3207   Label function_template_loop;
   3208   __ bind(&function_template_loop);
   3209 
   3210   // If the signatures match, we have a compatible receiver.
   3211   __ cmp(scratch0, FieldOperand(function_template_info,
   3212                                 FunctionTemplateInfo::kSignatureOffset));
   3213   __ j(equal, &receiver_check_passed, Label::kNear);
   3214 
   3215   // If the current type is not a FunctionTemplateInfo, load the next prototype
   3216   // in the chain.
   3217   __ JumpIfSmi(scratch0, &next_prototype, Label::kNear);
   3218   __ CmpObjectType(scratch0, FUNCTION_TEMPLATE_INFO_TYPE, scratch1);
   3219   __ j(not_equal, &next_prototype, Label::kNear);
   3220 
   3221   // Otherwise load the parent function template and iterate.
   3222   __ mov(scratch0,
   3223          FieldOperand(scratch0, FunctionTemplateInfo::kParentTemplateOffset));
   3224   __ jmp(&function_template_loop, Label::kNear);
   3225 
   3226   // Load the next prototype.
   3227   __ bind(&next_prototype);
   3228   __ mov(receiver, FieldOperand(receiver, HeapObject::kMapOffset));
   3229   __ test(FieldOperand(receiver, Map::kBitField3Offset),
   3230           Immediate(Map::HasHiddenPrototype::kMask));
   3231   __ j(zero, receiver_check_failed);
   3232 
   3233   __ mov(receiver, FieldOperand(receiver, Map::kPrototypeOffset));
   3234   __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
   3235   // Iterate.
   3236   __ jmp(&prototype_loop_start, Label::kNear);
   3237 
   3238   __ bind(&receiver_check_passed);
   3239 }
   3240 
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                : number of arguments (not including the receiver)
  //  -- edi                : callee
  //  -- esi                : context
  //  -- esp[0]             : return address
  //  -- esp[4]             : last argument
  //  -- ...
  //  -- esp[eax * 4]       : first argument
  //  -- esp[(eax + 1) * 4] : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  // Load the receiver (located just above the return address).
  __ mov(ecx, Operand(esp, eax, times_pointer_size, kPCOnStackSize));
  // Save the argument count; eax is used as a scratch register below.
  __ Push(eax);
  CompatibleReceiverCheck(masm, ecx, ebx, edx, eax, &receiver_check_failed);
  __ Pop(eax);
  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ mov(edx, FieldOperand(ebx, FunctionTemplateInfo::kCallCodeOffset));
  __ mov(edx, FieldOperand(edx, CallHandlerInfo::kFastHandlerOffset));
  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(edx);

  // Compatible receiver check failed: pop return address, arguments and
  // receiver and throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  __ Pop(eax);  // Restore the saved argument count.
  __ PopReturnAddressTo(ebx);
  // Drop the eax arguments plus the receiver from the stack.
  __ lea(eax, Operand(eax, times_pointer_size, 1 * kPointerSize));
  __ add(esp, eax);
  __ PushReturnAddressFrom(ebx);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
  }
}
   3283 
// Requests optimized code for the function in the current JavaScript frame
// and, if compilation succeeds, rewrites the return address on the stack so
// that returning from this builtin resumes at the code's OSR entry point.
static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
                                              bool has_handler_frame) {
  // Lookup the function in the JavaScript frame.
  if (has_handler_frame) {
    // A handler frame sits on top; follow the caller FP to the JS frame.
    __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
    __ mov(eax, Operand(eax, JavaScriptFrameConstants::kFunctionOffset));
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(eax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the caller.
  __ cmp(eax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Drop any potential handler frame that may be sitting on top of the
  // actual JavaScript frame. This is the case when OSR is triggered from
  // bytecode.
  if (has_handler_frame) {
    __ leave();
  }

  // Load deoptimization data from the code object.
  __ mov(ebx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ mov(ebx, Operand(ebx, FixedArray::OffsetOfElementAt(
                               DeoptimizationInputData::kOsrPcOffsetIndex) -
                               kHeapObjectTag));
  __ SmiUntag(ebx);

  // Compute the target address = code_obj + header_size + osr_offset
  __ lea(eax, Operand(eax, ebx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ mov(Operand(esp, 0), eax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}
   3333 
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // OSR with no handler frame on top of the JavaScript frame.
  Generate_OnStackReplacementHelper(masm, false);
}
   3337 
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  // OSR triggered from bytecode leaves a handler frame on top of the
  // JavaScript frame, which the helper drops before entering the
  // optimized code.
  Generate_OnStackReplacementHelper(masm, true);
}
   3341 
   3342 #undef __
   3343 }  // namespace internal
   3344 }  // namespace v8
   3345 
   3346 #endif  // V8_TARGET_ARCH_X87
   3347