// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X64

#include "src/codegen.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/stub-cache.h"

namespace v8 {
namespace internal {

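// All generated code below is emitted through the ACCESS_MASM macro, so a
// line such as `__ Push(rdi)` expands to `masm->Push(rdi)`.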
#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : called function (only guaranteed when
  //                           extra_args requires it)
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument (argc == rax)
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
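    // On x64 the return address sits at rsp[0], so the extra argument is
    // spliced in underneath it: pop the return address into the scratch
    // register, push the argument, then push the return address back.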
    __ PopReturnAddressTo(kScratchRegister);
    __ Push(rdi);
    __ PushReturnAddressFrom(kScratchRegister);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addp(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}


static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ Push(rdi);
  // Function is also the parameter to the runtime call.
  __ Push(rdi);

  __ CallRuntime(function_id, 1);
  // Restore the function.
  __ Pop(rdi);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ movp(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
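  // A Code object keeps its instructions directly after a fixed-size header;
  // adding Code::kHeaderSize (via FieldOperand, which also strips the heap
  // object tag) therefore yields the code's entry point.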
  __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
  __ jmp(kScratchRegister);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ leap(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // the stack limit as a cue for an interrupt signal.
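  // The stack limit is kept in the isolate's root list; rsp falling below it
  // is how pending interrupts are signalled, so that case takes the runtime
  // path below.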
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rbx: allocation site or undefined
  // -----------------------------------

  // Should never create mementos for api functions.
  ASSERT(!is_api_function || !create_memento);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(rbx);
      __ Push(rbx);
    }

    // Store a smi-tagged arguments count on the stack.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);

    // Push the function to invoke on the stack.
    __ Push(rdi);
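    // The construct frame now holds, from the top of the stack down: the
    // constructor, the smi-tagged argument count and, if create_memento,
    // the allocation site.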

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;

      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(masm->isolate());
      __ Move(kScratchRegister, debug_step_in_fp);
      __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
      __ j(not_equal, &rt_call);

      // Verified that the constructor is a JSFunction.
      // Load the initial map and verify that it is in fact a map.
      // rdi: constructor
      __ movp(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
      // A NULL value and a Smi are both caught by the Smi check below.
      ASSERT(kSmiTag == 0);
      __ JumpIfSmi(rax, &rt_call);
      // rdi: constructor
      // rax: initial map (if proven valid below)
      __ CmpObjectType(rax, MAP_TYPE, rbx);
      __ j(not_equal, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc); in that case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // rdi: constructor
      // rax: initial map
      __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
      __ j(equal, &rt_call);

      if (!is_api_function) {
        Label allocate;
        // The code below relies on these assumptions.
        STATIC_ASSERT(JSFunction::kNoSlackTracking == 0);
        STATIC_ASSERT(Map::ConstructionCount::kShift +
                      Map::ConstructionCount::kSize == 32);
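        // The construction counter lives in the topmost bits of the map's
        // bit field 3 (the STATIC_ASSERTs above pin it against bit 32), so
        // a plain logical shift right extracts it; a zero result means
        // slack tracking has already finished (kNoSlackTracking == 0).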
        // Check if slack tracking is enabled.
        __ movl(rsi, FieldOperand(rax, Map::kBitField3Offset));
        __ shrl(rsi, Immediate(Map::ConstructionCount::kShift));
        __ j(zero, &allocate);  // JSFunction::kNoSlackTracking
        // Decrease generous allocation count.
        __ subl(FieldOperand(rax, Map::kBitField3Offset),
                Immediate(1 << Map::ConstructionCount::kShift));

        __ cmpl(rsi, Immediate(JSFunction::kFinishSlackTracking));
        __ j(not_equal, &allocate);

        __ Push(rax);
        __ Push(rdi);

        __ Push(rdi);  // constructor
        __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);

        __ Pop(rdi);
        __ Pop(rax);
        __ xorl(rsi, rsi);  // JSFunction::kNoSlackTracking

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      __ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
      __ shlp(rdi, Immediate(kPointerSizeLog2));
      if (create_memento) {
        __ addp(rdi, Immediate(AllocationMemento::kSize));
      }
      // rdi: size of new object
      __ Allocate(rdi,
                  rbx,
                  rdi,
                  no_reg,
                  &rt_call,
                  NO_ALLOCATION_FLAGS);
      Factory* factory = masm->isolate()->factory();
      // Allocated the JSObject, now initialize the fields.
      // rax: initial map
      // rbx: JSObject (not HeapObject tagged - the actual address).
      // rdi: start of next object (including memento if create_memento)
      __ movp(Operand(rbx, JSObject::kMapOffset), rax);
      __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
      __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
      __ movp(Operand(rbx, JSObject::kElementsOffset), rcx);
      // Set extra fields in the newly allocated object.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object (including memento if create_memento)
      // rsi: slack tracking counter (non-API function case)
      __ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ cmpl(rsi, Immediate(JSFunction::kNoSlackTracking));
        __ j(equal, &no_inobject_slack_tracking);

        // Allocate the object with slack.
        __ movzxbp(rsi,
                   FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
        __ leap(rsi,
                Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
        // rsi: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmpp(rsi, rdi);
          __ Assert(less_equal,
                    kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);
        // Fill the remaining fields with the one-pointer filler map.
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);

        __ bind(&no_inobject_slack_tracking);
      }
      if (create_memento) {
        __ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);

        // Fill in memento fields if necessary.
        // rsi: points to the allocated but uninitialized memento.
        __ Move(Operand(rsi, AllocationMemento::kMapOffset),
                factory->allocation_memento_map());
        // Get the cell or undefined.
        __ movp(rdx, Operand(rsp, kPointerSize*2));
        __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
      } else {
        __ InitializeFieldsWithFiller(rcx, rdi, rdx);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      __ orp(rbx, Immediate(kHeapObjectTag));

      // Check if a non-empty properties array is needed.
      // Allocate and initialize a FixedArray if it is.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      // Calculate the total number of properties described by the map.
      __ movzxbp(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
      __ movzxbp(rcx,
                 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
      __ addp(rdx, rcx);
      // Calculate unused properties past the end of the in-object properties.
      __ movzxbp(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
      __ subp(rdx, rcx);
      // Done if no extra properties are to be allocated.
      __ j(zero, &allocated);
      __ Assert(positive, kPropertyAllocationCountFailed);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // rbx: JSObject
      // rdi: start of next object (will be start of FixedArray)
      // rdx: number of elements in properties array
      __ Allocate(FixedArray::kHeaderSize,
                  times_pointer_size,
                  rdx,
                  rdi,
                  rax,
                  no_reg,
                  &undo_allocation,
                  RESULT_CONTAINS_TOP);
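      // RESULT_CONTAINS_TOP: rdi already holds the current allocation top
      // (the end of the JSObject computed above), so the FixedArray is
      // carved out starting exactly there.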

      // Initialize the FixedArray.
      // rbx: JSObject
      // rdi: FixedArray
      // rdx: number of elements
      // rax: start of next object
      __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
      __ movp(Operand(rdi, HeapObject::kMapOffset), rcx);  // setup the map
      __ Integer32ToSmi(rdx, rdx);
      __ movp(Operand(rdi, FixedArray::kLengthOffset), rdx);  // and length

      // Initialize the fields to undefined.
      // rbx: JSObject
      // rdi: FixedArray
      // rax: start of next object
      // rdx: number of elements
      { Label loop, entry;
        __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
        __ leap(rcx, Operand(rdi, FixedArray::kHeaderSize));
        __ jmp(&entry);
        __ bind(&loop);
        __ movp(Operand(rcx, 0), rdx);
        __ addp(rcx, Immediate(kPointerSize));
        __ bind(&entry);
        __ cmpp(rcx, rax);
        __ j(below, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // rbx: JSObject
      // rdi: FixedArray
      __ orp(rdi, Immediate(kHeapObjectTag));  // add the heap tag
      __ movp(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);


      // Continue with JSObject being successfully allocated.
      // rbx: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated object's unused properties.
      // rbx: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(rbx);
    }

    // Allocate the new receiver object using the runtime call.
    // rdi: function (constructor)
    __ bind(&rt_call);
    int offset = 0;
    if (create_memento) {
      // Get the cell or allocation site.
      __ movp(rdi, Operand(rsp, kPointerSize*2));
      __ Push(rdi);
      offset = kPointerSize;
    }
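    // When a memento is requested the allocation site (stashed two slots
    // deep in the construct frame) has just been re-pushed, so the
    // constructor now sits one pointer further down; `offset` accounts for
    // that when it is reloaded below.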

    // Must restore rsi (context) and rdi (constructor) before calling runtime.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
    __ movp(rdi, Operand(rsp, offset));
    __ Push(rdi);
    if (create_memento) {
      __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2);
    } else {
      __ CallRuntime(Runtime::kHiddenNewObject, 1);
    }
    __ movp(rbx, rax);  // store result in rbx

    // If we ended up using the runtime, and we want a memento, then the
    // runtime call made it for us, and we shouldn't do the create-count
    // increment.
    Label count_incremented;
    if (create_memento) {
      __ jmp(&count_incremented);
    }

    // New object allocated.
    // rbx: newly allocated object
    __ bind(&allocated);

    if (create_memento) {
      __ movp(rcx, Operand(rsp, kPointerSize*2));
      __ Cmp(rcx, masm->isolate()->factory()->undefined_value());
      __ j(equal, &count_incremented);
      // rcx is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ SmiAddConstant(
          FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset),
          Smi::FromInt(1));
      __ bind(&count_incremented);
    }

    // Retrieve the function from the stack.
    __ Pop(rdi);

    // Retrieve smi-tagged arguments count from the stack.
    __ movp(rax, Operand(rsp, 0));
    __ SmiToInteger32(rax, rax);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ Push(rbx);
    __ Push(rbx);

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);
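    // The loop above walks rcx from argc - 1 down to 0, re-pushing the
    // caller's arguments onto the construct frame so the callee sees them
    // in the usual order.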

    // Call the function.
    if (is_api_function) {
      __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(rax);
      __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;
    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movp(rax, Operand(rsp, 0));

    // Restore the arguments count and leave the construct frame.
    __ bind(&exit);
    __ movp(rbx, Operand(rsp, kPointerSize));  // Get arguments count.

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ ret(0);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, and
    // registers rax and rbx hold the argument count and argument array,
    // while rdi holds the function pointer and rsi the context.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : entry (ignored)
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Load the function context into rsi.
    __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(rdx);
    __ Push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C arguments on the stack.
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
#else  // _WIN64
    // GCC parameters in:
    // rdi : entry (ignored)
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movp(rdi, rsi);
    // rdi : function

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the function and receiver and setup the context.
    __ Push(rdi);
    __ Push(rdx);
    __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop);
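    // argv is an array of handles (each slot is an Object**), which is why
    // every element is loaded and then dereferenced once more before being
    // pushed.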

    // Invoke the code.
    if (is_construct) {
      // No type feedback cell is available.
      __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
      // Expects rdi to hold function pointer.
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(rax);
      // Function must be in rdi.
      __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ Push(rdi);
  // Function is also the parameter to the runtime call.
  __ Push(rdi);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kHiddenCompileOptimized, 2);
  // Restore the function.
  __ Pop(rdi);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection, which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  __ subp(Operand(rsp, 0), Immediate(5));
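  // The 5 bytes backed out of the return address match the length of the
  // short call that the code-age patch placed in the function prologue
  // (Assembler::kShortCallInstructionLength), so returning from this stub
  // re-executes the freshly rejuvenated prologue.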
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ Popad();
  __ ret(0);
}


#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection, which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to the point after the code-age stub.
  __ ret(0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across the notification; this is important for
    // compiled stubs that tail call the runtime on deopts, passing their
    // parameters in registers.
    __ Pushad();
    __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles);
    __ Popad();
    // Tear down internal frame.
  }

  __ DropUnderReturnAddress(1);  // Ignore state offset.
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1);
    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort(kNoCasesLeft);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  // rsp[0]           : Return address
  // rsp[8]           : Argument n
  // rsp[16]          : Argument n-1
  //  ...
  // rsp[8 * n]       : Argument 1
  // rsp[8 * (n + 1)] : Receiver (function to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  { Label done;
    __ testp(rax, rax);
    __ j(not_zero, &done);
    __ PopReturnAddressTo(rbx);
    __ Push(masm->isolate()->factory()->undefined_value());
    __ PushReturnAddressFrom(rbx);
    __ incp(rax);
    __ bind(&done);
  }
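  // Pushing undefined when there are no arguments guarantees that step 4
  // below always has an argument slot to promote into the receiver
  // position.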

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label slow, non_function;
  StackArgumentsAccessor args(rsp, rax);
  __ movp(rdi, args.GetReceiverOperand());
  __ JumpIfSmi(rdi, &non_function);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ Set(rdx, 0);  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &shift_arguments);

    // Do not transform the receiver for natives.
    // SharedFunctionInfo is already loaded into rbx.
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_zero, &shift_arguments);

    // Compute the receiver in sloppy mode.
    __ movp(rbx, args.GetArgumentOperand(1));
    __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);

    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &shift_arguments);

    __ bind(&convert_to_object);
    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Integer32ToSmi(rax, rax);
      __ Push(rax);

      __ Push(rbx);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ movp(rbx, rax);
      __ Set(rdx, 0);  // indicate regular JS_FUNCTION

      __ Pop(rax);
      __ SmiToInteger32(rax, rax);
    }

    // Restore the function to rdi.
    __ movp(rdi, args.GetReceiverOperand());
    __ jmp(&patch_receiver, Label::kNear);

    __ bind(&use_global_receiver);
    __ movp(rbx,
            Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
    __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ movp(args.GetArgumentOperand(1), rbx);

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ Set(rdx, 1);  // indicate function proxy
  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
  __ j(equal, &shift_arguments);
  __ bind(&non_function);
  __ Set(rdx, 2);  // indicate non-function

  // 3c. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ movp(args.GetArgumentOperand(1), rdi);

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  __ bind(&shift_arguments);
  { Label loop;
    __ movp(rcx, rax);
    StackArgumentsAccessor args(rsp, rcx);
    __ bind(&loop);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ movp(args.GetArgumentOperand(0), rbx);
    __ decp(rcx);
    __ j(not_zero, &loop);  // While non-zero.
    __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
    __ decp(rax);  // One fewer argument (first argument is new receiver).
  }
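  // After the shift the first argument occupies the receiver slot, every
  // remaining argument has moved one slot down, the duplicate slot under
  // the return address has been dropped, and rax has been reduced by one.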

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  { Label function, non_proxy;
    __ testp(rdx, rdx);
    __ j(zero, &function);
    __ Set(rbx, 0);
    __ cmpp(rdx, Immediate(1));
    __ j(not_equal, &non_proxy);

    __ PopReturnAddressTo(rdx);
    __ Push(rdi);  // re-add proxy object as additional argument
    __ PushReturnAddressFrom(rdx);
    __ incp(rax);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register rdx without checking arguments.
  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(rbx, rdx,
      SharedFunctionInfo::kFormalParameterCountOffset);
  __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpp(rax, rbx);
  __ j(not_equal,
       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
       RelocInfo::CODE_TARGET);

  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Stack at entry:
  // rsp     : return address
  // rsp[8]  : arguments
  // rsp[16] : receiver ("this")
  // rsp[24] : function
  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Stack frame:
    // rbp     : Old base pointer
    // rbp[8]  : return address
    // rbp[16] : function arguments
    // rbp[24] : receiver
    // rbp[32] : function
    static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
    static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
    static const int kFunctionOffset = kReceiverOffset + kPointerSize;

    __ Push(Operand(rbp, kFunctionOffset));
    __ Push(Operand(rbp, kArgumentsOffset));
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real
    // stack limit" is checked.
    Label okay;
    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
    __ movp(rcx, rsp);
    // Make rcx the space we have left. The stack might already be overflowed
    // here, which will cause rcx to become negative.
    __ subp(rcx, kScratchRegister);
    // Make rdx the space we need for the array when it is unrolled onto the
    // stack.
    __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
    // Check if the arguments will overflow the stack.
    __ cmpp(rcx, rdx);
    __ j(greater, &okay);  // Signed comparison.

    // Out of stack space.
    __ Push(Operand(rbp, kFunctionOffset));
    __ Push(rax);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ bind(&okay);
    // End of stack check.

    // Push the current index and limit.
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
    const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
    __ Push(rax);  // limit
    __ Push(Immediate(0));  // index
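    // APPLY_PREPARE has left the smi-tagged argument count in rax; it and a
    // running index occupy two known expression-stack slots of this frame
    // (kLimitOffset and kIndexOffset), addressed relative to rbp below.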

    // Get the receiver.
    __ movp(rbx, Operand(rbp, kReceiverOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ movp(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &push_receiver);

    // Do not transform the receiver for natives.
    __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_equal, &push_receiver);

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    // If the given receiver is already a JavaScript object then there's no
    // reason for converting it.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &push_receiver);

    // Convert the receiver to an object.
    __ bind(&call_to_object);
    __ Push(rbx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ movp(rbx, rax);
    __ jmp(&push_receiver, Label::kNear);

    __ bind(&use_global_receiver);
    __ movp(rbx,
            Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
    __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    __ bind(&push_receiver);
    __ Push(rbx);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ movp(rax, Operand(rbp, kIndexOffset));
    __ jmp(&entry);
    __ bind(&loop);
    __ movp(rdx, Operand(rbp, kArgumentsOffset));  // load arguments

    // Use inline caching to speed up access to arguments.
    Handle<Code> ic =
        masm->isolate()->builtins()->KeyedLoadIC_Initialize();
    __ Call(ic, RelocInfo::CODE_TARGET);
    // It is important that we do not have a test instruction after the
    // call.  A test instruction after the call is used to indicate that
    // we have generated an inline version of the keyed load.  In this
    // case, we know that we are not generating a test instruction next.

    // Push the nth argument.
    __ Push(rax);

    // Update the index on the stack and in register rax.
    __ movp(rax, Operand(rbp, kIndexOffset));
    __ SmiAddConstant(rax, rax, Smi::FromInt(1));
    __ movp(Operand(rbp, kIndexOffset), rax);

    __ bind(&entry);
    __ cmpp(rax, Operand(rbp, kLimitOffset));
    __ j(not_equal, &loop);
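    // Each iteration loads arguments[index] through the generic keyed-load
    // IC (receiver in rdx, smi key in rax on this port) and pushes the
    // result; index and limit stay smi-tagged, so cmpp compares them
    // directly.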

    // Call the function.
    Label call_proxy;
    ParameterCount actual(rax);
    __ SmiToInteger32(rax, rax);
    __ movp(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &call_proxy);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ ret(3 * kPointerSize);  // remove this, receiver, and arguments

    // Call the function proxy.
    __ bind(&call_proxy);
    __ Push(rdi);  // add function proxy as last argument
    __ incp(rax);
    __ Set(rbx, 0);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ ret(3 * kPointerSize);  // remove this, receiver, and arguments
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray functions should be a map.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // A NULL value and a Smi are both caught by the Smi check below.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function (tail call a stub).
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array functions should be a map.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // A NULL value and a Smi are both caught by the Smi check below.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function
  // (tail call a stub).
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1);

  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
    __ cmpp(rdi, rcx);
    __ Assert(equal, kUnexpectedStringFunction);
  }

  // Load the first argument into rax and get rid of the rest
  // (including the receiver).
  StackArgumentsAccessor args(rsp, rax);
  Label no_arguments;
  __ testp(rax, rax);
  __ j(zero, &no_arguments);
  __ movp(rbx, args.GetArgumentOperand(1));
  __ PopReturnAddressTo(rcx);
  __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ movp(rax, rbx);

  // Look up the argument in the number to string cache.
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(rax,  // Input.
                             rbx,  // Result.
                             rcx,  // Scratch 1.
                             rdx,  // Scratch 2.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- rbx    : argument converted to string
  //  -- rdi    : constructor function
  //  -- rsp[0] : return address
  // -----------------------------------

  // Allocate a JSValue and put the tagged pointer into rax.
  Label gc_required;
  __ Allocate(JSValue::kSize,
              rax,  // Result.
              rcx,  // New allocation top (we ignore it).
              no_reg,
              &gc_required,
              TAG_OBJECT);

  // Set the map.
  __ LoadGlobalFunctionInitialMap(rdi, rcx);
  if (FLAG_debug_code) {
    __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
            Immediate(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
    __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
    __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rcx);

  // Set properties and elements.
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rcx);

  // Set the value.
  __ movp(FieldOperand(rax, JSValue::kValueOffset), rbx);

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
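  // The four stores above (map, properties, elements, value) cover all of
  // JSValue::kSize, which the assert pins at four words, so no filler
  // fields remain.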
   1257 
   1258   // We're done. Return.
   1259   __ ret(0);
   1260 
   1261   // The argument was not found in the number to string cache. Check
   1262   // if it's a string already before calling the conversion builtin.
   1263   Label convert_argument;
   1264   __ bind(&not_cached);
   1265   STATIC_ASSERT(kSmiTag == 0);
   1266   __ JumpIfSmi(rax, &convert_argument);
   1267   Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
   1268   __ j(NegateCondition(is_string), &convert_argument);
   1269   __ movp(rbx, rax);
   1270   __ IncrementCounter(counters->string_ctor_string_value(), 1);
   1271   __ jmp(&argument_is_string);
   1272 
   1273   // Invoke the conversion builtin and put the result into rbx.
   1274   __ bind(&convert_argument);
   1275   __ IncrementCounter(counters->string_ctor_conversions(), 1);
   1276   {
   1277     FrameScope scope(masm, StackFrame::INTERNAL);
   1278     __ Push(rdi);  // Preserve the function.
   1279     __ Push(rax);
   1280     __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
   1281     __ Pop(rdi);
   1282   }
   1283   __ movp(rbx, rax);
   1284   __ jmp(&argument_is_string);
   1285 
   1286   // Load the empty string into rbx, remove the receiver from the
   1287   // stack, and jump back to the case where the argument is a string.
   1288   __ bind(&no_arguments);
   1289   __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
   1290   __ PopReturnAddressTo(rcx);
   1291   __ leap(rsp, Operand(rsp, kPointerSize));
   1292   __ PushReturnAddressFrom(rcx);
  __ jmp(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ ret(0);
}


static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
                                       Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdi : function (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(rdx, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here, which will cause rcx to become negative.
  __ subp(rcx, rdx);
  // Make rdx the space we need for the array when it is unrolled onto the
  // stack.
  __ movp(rdx, rbx);
  __ shlp(rdx, Immediate(kPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, rdx);
  __ j(less_equal, stack_overflow);  // Signed comparison.
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ pushq(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
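  // The sentinel occupies the slot where an ordinary JavaScript frame keeps
  // its context, which is how the stack walker tells adaptor frames apart
  // from regular frames.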

  // Push the function on the stack.
  __ Push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ Push(r8);
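  // The count is stored as a Smi so the slot always holds a valid tagged
  // value if the GC walks this frame. Resulting layout, relative to the
  // new rbp:
  //   rbp + 8 : return address
  //   rbp + 0 : caller's rbp
  //   rbp - 8 : ARGUMENTS_ADAPTOR sentinel (Smi)
  //   rbp - 16: function
  //   rbp - 24: number of arguments (Smi)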
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. The number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movp(rsp, rbp);
  __ popq(rbp);

  // Remove caller arguments from the stack.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
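  // The extra 1 * kPointerSize in the leap above drops the receiver in
  // addition to the rbx argument slots, since the SmiIndex only covers the
  // arguments themselves.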
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdi : function (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label stack_overflow;
  ArgumentsAdaptorStackCheck(masm, &stack_overflow);

  Label enough, too_few;
  __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpp(rax, rbx);
  __ j(less, &too_few);
  __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);
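  // kDontAdaptArgumentsSentinel in the expected-count slot marks functions
  // that cope with any actual argument count themselves, so the adaptation
  // machinery is skipped entirely for them.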

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rax, 0));
    __ subp(rax, Immediate(kPointerSize));
    __ cmpp(r8, rbx);
    __ j(less, &copy);
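    // In total the loop pushes rbx + 1 slots: the receiver on its first
    // iteration, then the rbx expected arguments, read from the caller's
    // frame top-down so the callee sees the usual layout. Actual arguments
    // beyond the expected count stay behind in the caller's frame.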
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rdi, 0));
    __ subp(rdi, Immediate(kPointerSize));
    __ cmpp(r8, rax);
    __ j(less, &copy);
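    // rdi serves as the copy cursor here because rax must keep the actual
    // argument count as the loop bound; the function pointer in rdi is
    // restored from the frame once copying and filling are done.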

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incp(r8);
    __ Push(kScratchRegister);
    __ cmpp(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
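  // The recorded offset (the pc right after the call above) is what the
  // deoptimizer uses as the resume address when it materializes an
  // arguments adaptor frame while reconstructing the stack.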

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ int3();
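    // The STACK_OVERFLOW builtin throws and never returns; the int3 above
    // is a trap that catches the impossible fall-through in case it ever
    // does.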
  }
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Look up the function in the JavaScript frame.
  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function as the argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmpp(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
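  // The deoptimization data is a FixedArray, and the OSR pc offset is stored
  // in it as a Smi, hence the SmiToInteger32 when reading the element.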

  // Compute the target address = code_obj + header_size + osr_offset.
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as an indicator that recompilation might be
  // done.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kHiddenStackGuard, 0);
  }
  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
         RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ ret(0);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64