// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "codegen.h"
#include "deoptimizer.h"
#include "full-codegen.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                : number of arguments excluding receiver
  //  -- rdi                : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- rsi                : context
  //  -- rsp[0]             : return address
  //  -- rsp[8]             : last argument
  //  -- ...
  //  -- rsp[8 * argc]      : first argument (argc == rax)
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ pop(kScratchRegister);  // Save return address.
    __ push(rdi);
    __ push(kScratchRegister);  // Restore return address.
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addq(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}
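
// Worked example (illustrative, not part of the original code): for a call
// with two arguments and extra_args == NEEDS_CALLED_FUNCTION, the stub
// rewrites the stack
//   [ret][arg2][arg1][receiver]        rax == 2
// into
//   [ret][rdi][arg2][arg1][receiver]   rax == 2 + 1 + 1 == 4
// before tail-calling the C++ builtin, so rax counts the receiver and the
// inserted extra argument as well.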


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  // -----------------------------------

  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Store a smi-tagged arguments count on the stack.
    __ Integer32ToSmi(rax, rax);
    __ push(rax);
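
    // Note on the tagging above (a sketch, assuming the x64 smi encoding of
    // this V8 version): a smi keeps a 32-bit payload in the upper half of
    // the word, so Integer32ToSmi(rax, rax) amounts to `shl rax, 32`; an
    // argument count of 2 is stored as 0x0000000200000000.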

    // Push the function to invoke on the stack.
    __ push(rdi);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;

#ifdef ENABLE_DEBUGGER_SUPPORT
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(masm->isolate());
      __ movq(kScratchRegister, debug_step_in_fp);
      __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
      __ j(not_equal, &rt_call);
#endif

      // Verified that the constructor is a JSFunction.
      // Load the initial map and verify that it is in fact a map.
      // rdi: constructor
      __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
      // The smi check below catches both a NULL initial map and a smi.
      ASSERT(kSmiTag == 0);
      __ JumpIfSmi(rax, &rt_call);
      // rdi: constructor
      // rax: initial map (if proven valid below)
      __ CmpObjectType(rax, MAP_TYPE, rbx);
      __ j(not_equal, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc), in which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // rdi: constructor
      // rax: initial map
      __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
      __ j(equal, &rt_call);

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
        __ decb(FieldOperand(rcx,
                             SharedFunctionInfo::kConstructionCountOffset));
        __ j(not_zero, &allocate);

        __ push(rax);
        __ push(rdi);

        __ push(rdi);  // constructor
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(rdi);
        __ pop(rax);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
      __ shl(rdi, Immediate(kPointerSizeLog2));
      // rdi: size of new object
      __ AllocateInNewSpace(rdi,
                            rbx,
                            rdi,
                            no_reg,
                            &rt_call,
                            NO_ALLOCATION_FLAGS);
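
      // What AllocateInNewSpace does here, in pseudo-code (an illustrative
      // sketch of linear new-space allocation, not the original code):
      //   rbx = allocation_top;                       // result
      //   if (rbx + rdi > allocation_limit) goto rt_call;
      //   allocation_top = rbx + rdi;                 // bump the top
      //   rdi = allocation_top;                       // start of next object
      // The result in rbx is untagged because NO_ALLOCATION_FLAGS was passed.
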
      // Allocated the JSObject, now initialize the fields.
      // rax: initial map
      // rbx: JSObject (not HeapObject tagged - the actual address).
      // rdi: start of next object
      __ movq(Operand(rbx, JSObject::kMapOffset), rax);
      __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
      __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
      __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
      // Set extra fields in the newly allocated object.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      if (count_constructions) {
        __ movzxbq(rsi,
                   FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
        __ lea(rsi,
               Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
        // rsi: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmpq(rsi, rdi);
          __ Assert(less_equal,
                    "Unexpected number of pre-allocated property fields.");
        }
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
      }
      __ InitializeFieldsWithFiller(rcx, rdi, rdx);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      __ or_(rbx, Immediate(kHeapObjectTag));

      // Check if a non-empty properties array is needed.
      // Allocate and initialize a FixedArray if it is.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      // Calculate the total number of properties described by the map.
      __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
      __ movzxbq(rcx,
                 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
      __ addq(rdx, rcx);
      // Calculate unused properties past the end of the in-object properties.
      __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
      __ subq(rdx, rcx);
      // Done if no extra properties are to be allocated.
      __ j(zero, &allocated);
      __ Assert(positive, "Property allocation count failed.");

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // rbx: JSObject
      // rdi: start of next object (will be start of FixedArray)
      // rdx: number of elements in properties array
      __ AllocateInNewSpace(FixedArray::kHeaderSize,
                            times_pointer_size,
                            rdx,
                            rdi,
                            rax,
                            no_reg,
                            &undo_allocation,
                            RESULT_CONTAINS_TOP);

      // Initialize the FixedArray.
      // rbx: JSObject
      // rdi: FixedArray
      // rdx: number of elements
      // rax: start of next object
      __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
      __ movq(Operand(rdi, HeapObject::kMapOffset), rcx);  // setup the map
      __ Integer32ToSmi(rdx, rdx);
      __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx);  // and length

      // Initialize the fields to undefined.
      // rbx: JSObject
      // rdi: FixedArray
      // rax: start of next object
      // rdx: number of elements
      { Label loop, entry;
        __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
        __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
        __ jmp(&entry);
        __ bind(&loop);
        __ movq(Operand(rcx, 0), rdx);
        __ addq(rcx, Immediate(kPointerSize));
        __ bind(&entry);
        __ cmpq(rcx, rax);
        __ j(below, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject
      // rbx: JSObject
      // rdi: FixedArray
      __ or_(rdi, Immediate(kHeapObjectTag));  // add the heap tag
      __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);


      // Continue with JSObject being successfully allocated
      // rbx: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated object's unused properties.
      // rbx: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(rbx);
    }

    // Allocate the new receiver object using the runtime call.
    // rdi: function (constructor)
    __ bind(&rt_call);
    // Must restore rdi (constructor) before calling runtime.
    __ movq(rdi, Operand(rsp, 0));
    __ push(rdi);
    __ CallRuntime(Runtime::kNewObject, 1);
    __ movq(rbx, rax);  // store result in rbx

    // New object allocated.
    // rbx: newly allocated object
    __ bind(&allocated);
    // Retrieve the function from the stack.
    __ pop(rdi);

    // Retrieve smi-tagged arguments count from the stack.
    __ movq(rax, Operand(rsp, 0));
    __ SmiToInteger32(rax, rax);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ push(rbx);
    __ push(rbx);

    // Set up pointer to last argument.
    __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movq(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decq(rcx);
    __ j(greater_equal, &loop);
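
    // Illustrative trace of the copy loop (not in the original source): for
    // rax == 2, rbx points at the caller's last argument, so the loop pushes
    // the slot at rbx + 8 (first argument) and then the slot at rbx + 0
    // (last argument), exiting once rcx underflows to -1. The arguments end
    // up on the expression stack in the same order as in the caller's frame.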

    // Call the function.
    if (is_api_function) {
      __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      ParameterCount expected(0);
      __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
                    CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      ParameterCount actual(rax);
      __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;
    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movq(rax, Operand(rsp, 0));

    // Restore the arguments count and leave the construct frame.
    __ bind(&exit);
    __ movq(rbx, Operand(rsp, kPointerSize));  // Get arguments count.

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ ret(0);
}
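
// Sketch of the epilogue above (illustrative): rbx holds the smi-tagged
// argument count, and SmiToIndex turns it into a byte-scaled index, so the
// lea is in effect
//   rsp += 8 * argc + 8;  // drop the arguments and the receiver
// performed between popping and re-pushing the return address in rcx.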


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, and
    // registers rax and rbx hold the argument count and argument array,
    // while rdi holds the function pointer and rsi the context.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx : entry (ignored)
    // rdx : function
    // r8 : receiver
    // r9 : argc
    // [rsp+0x20] : argv

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Load the function context into rsi.
    __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ push(rdx);
    __ push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movq(rax, r9);
    // Load the previous frame pointer to access the C arguments on the stack.
    __ movq(kScratchRegister, Operand(rbp, 0));
    __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movq(rdi, rdx);
#else  // _WIN64
    // GCC parameters in:
    // rdi : entry (ignored)
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movq(rdi, rsi);
    // rdi : function

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the function and receiver and setup the context.
    __ push(rdi);
    __ push(rdx);
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Load the number of arguments and setup pointer to the arguments.
    __ movq(rax, rcx);
    __ movq(rbx, r8);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ]: Internal frame
    // [rsp + kPointerSize]         : function
    // [rsp]                        : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ push(Operand(kScratchRegister, 0));  // dereference handle
    __ addq(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpq(rcx, rax);
    __ j(not_equal, &loop);

    // Invoke the code.
    if (is_construct) {
      // Expects rdi to hold function pointer.
      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(rax);
      // Function must be in rdi.
      __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }
    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}
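
// The argv loop above double-dereferences because argv is an array of handle
// locations (Object***). In C++ terms the body is roughly (a sketch, not the
// original code):
//   for (int i = 0; i < argc; i++) Push(*argv[i]);  // dereference the handle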


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push a copy of the function onto the stack.
    __ push(rdi);
    // Push call kind information.
    __ push(rcx);

    __ push(rdi);  // Function is also the parameter to the runtime call.
    __ CallRuntime(Runtime::kLazyCompile, 1);

    // Restore call kind information.
    __ pop(rcx);
    // Restore function.
    __ pop(rdi);

    // Tear down internal frame.
  }

  // Do a tail-call of the compiled function.
  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}
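
// Note on the tail call above (illustrative): the runtime call leaves the
// compiled Code object in rax. Heap pointers carry kHeapObjectTag (== 1), so
// FieldOperand(rax, Code::kHeaderSize) computes rax + Code::kHeaderSize - 1,
// the address of the first instruction, and the jmp enters the freshly
// compiled code without growing the stack.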


void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push a copy of the function onto the stack.
    __ push(rdi);
    // Push call kind information.
    __ push(rcx);

    __ push(rdi);  // Function is also the parameter to the runtime call.
    __ CallRuntime(Runtime::kLazyRecompile, 1);

    // Restore call kind information.
    __ pop(rcx);
    // Restore function.
    __ pop(rdi);

    // Tear down internal frame.
  }

  // Do a tail-call of the compiled function.
  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  __ movq(rax, Operand(rsp, 2 * kPointerSize));
  __ cmpq(rcx, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort("no cases left");
}
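
// Stack picture for the returns above (a sketch): once the internal frame is
// gone the stack is [ret][state][tos value], so ret(1 * kPointerSize) drops
// just the state word, while ret(2 * kPointerSize) also drops the slot whose
// contents were reloaded into rax.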

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  // For now, we are relying on the fact that Runtime::NotifyOSR
  // doesn't do any garbage collection which allows us to save/restore
  // the registers without worrying about which of them contain
  // pointers. This seems a bit fragile.
  __ Pushad();
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyOSR, 0);
  }
  __ Popad();
  __ ret(0);
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  // rsp[0]:   Return address
  // rsp[1]:   Argument n
  // rsp[2]:   Argument n-1
  //  ...
  // rsp[n]:   Argument 1
  // rsp[n+1]: Receiver (function to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  { Label done;
    __ testq(rax, rax);
    __ j(not_zero, &done);
    __ pop(rbx);
    __ Push(masm->isolate()->factory()->undefined_value());
    __ push(rbx);
    __ incq(rax);
    __ bind(&done);
  }
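
  // Illustrative effect of step 1 (not in the original source): for a call
  // with no arguments the stack goes from [ret][function] to
  // [ret][undefined][function] and rax from 0 to 1; the inserted undefined
  // later becomes the receiver when the arguments are shifted in step 4.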

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label slow, non_function;
  // The function to call is at position n+1 on the stack.
  __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ JumpIfSmi(rdi, &non_function);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ Set(rdx, 0);  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &shift_arguments);

    // Do not transform the receiver for natives.
    // SharedFunctionInfo is already loaded into rbx.
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_zero, &shift_arguments);

    // Compute the receiver in non-strict mode.
    __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
    __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);

    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &shift_arguments);

    __ bind(&convert_to_object);
    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Integer32ToSmi(rax, rax);
      __ push(rax);

      __ push(rbx);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ movq(rbx, rax);
      __ Set(rdx, 0);  // indicate regular JS_FUNCTION

      __ pop(rax);
      __ SmiToInteger32(rax, rax);
    }

    // Restore the function to rdi.
    __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
    __ jmp(&patch_receiver, Label::kNear);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
    __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ Set(rdx, 1);  // indicate function proxy
  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
  __ j(equal, &shift_arguments);
  __ bind(&non_function);
  __ Set(rdx, 2);  // indicate non-function

  // 3c. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ movq(Operand(rsp, rax, times_pointer_size, 0), rdi);

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  __ bind(&shift_arguments);
  { Label loop;
    __ movq(rcx, rax);
    __ bind(&loop);
    __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
    __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
    __ decq(rcx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(rbx);  // Discard copy of return address.
    __ decq(rax);  // One fewer argument (first argument is new receiver).
  }
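
  // Worked example of the shift (illustrative), for rax == 2:
  //   before: [ret][arg2][arg1][receiver]   rax == 2
  //   after:  [ret][arg2][arg1]             rax == 1
  // Every slot, return address included, moves one word toward higher
  // addresses, the leftover copy of the return address is popped, and arg1
  // now sits in the receiver position.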

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  { Label function, non_proxy;
    __ testq(rdx, rdx);
    __ j(zero, &function);
    __ Set(rbx, 0);
    __ SetCallKind(rcx, CALL_AS_METHOD);
    __ cmpq(rdx, Immediate(1));
    __ j(not_equal, &non_proxy);

    __ pop(rdx);   // return address
    __ push(rdi);  // re-add proxy object as additional argument
    __ push(rdx);
    __ incq(rax);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register rdx without checking arguments.
  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movsxlq(rbx,
             FieldOperand(rdx,
                          SharedFunctionInfo::kFormalParameterCountOffset));
  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ SetCallKind(rcx, CALL_AS_METHOD);
  __ cmpq(rax, rbx);
  __ j(not_equal,
       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
       RelocInfo::CODE_TARGET);

  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION,
                NullCallWrapper(), CALL_AS_METHOD);
}
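
// Sketch of the dispatch in step 5b (illustrative): with rbx holding the
// formal parameter count and rax the actual count, the logic is
//   if (rax != rbx) tail_call(ArgumentsAdaptorTrampoline);
//   else            tail_call(function code entry);   // fast path
// so a matching argument count jumps straight into the function's code.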


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Stack at entry:
  //    rsp: return address
  //  rsp+8: arguments
  // rsp+16: receiver ("this")
  // rsp+24: function
  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Stack frame:
    //    rbp: Old base pointer
    // rbp[1]: return address
    // rbp[2]: function arguments
    // rbp[3]: receiver
    // rbp[4]: function
    static const int kArgumentsOffset = 2 * kPointerSize;
    static const int kReceiverOffset = 3 * kPointerSize;
    static const int kFunctionOffset = 4 * kPointerSize;

    __ push(Operand(rbp, kFunctionOffset));
    __ push(Operand(rbp, kArgumentsOffset));
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
    __ movq(rcx, rsp);
    // Make rcx the space we have left. The stack might already be overflowed
    // here which will cause rcx to become negative.
    __ subq(rcx, kScratchRegister);
    // Make rdx the space we need for the array when it is unrolled onto the
    // stack.
    __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
    // Check if the arguments will overflow the stack.
    __ cmpq(rcx, rdx);
    __ j(greater, &okay);  // Signed comparison.

    // Out of stack space.
    __ push(Operand(rbp, kFunctionOffset));
    __ push(rax);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    __ bind(&okay);
    // End of stack check.
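
    // The check above computes, in effect (illustrative, n_args being the
    // untagged argument count):
    //   if (rsp - real_stack_limit > 8 * n_args) goto okay;
    //   else invoke APPLY_OVERFLOW;
    // with a signed comparison, so an already-overflowed stack (negative
    // difference) also takes the slow path.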

    // Push current index and limit.
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
    const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
    __ push(rax);  // limit
    __ push(Immediate(0));  // index

    // Get the receiver.
    __ movq(rbx, Operand(rbp, kReceiverOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ movq(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &push_receiver);

    // Do not transform the receiver for natives.
    __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_equal, &push_receiver);

    // Compute the receiver in non-strict mode.
    __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    // If the given receiver is already a JavaScript object then there's no
    // reason for converting it.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &push_receiver);

    // Convert the receiver to an object.
    __ bind(&call_to_object);
    __ push(rbx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ movq(rbx, rax);
    __ jmp(&push_receiver, Label::kNear);

    // Use the current global receiver object as the receiver.
    __ bind(&use_global_receiver);
    const int kGlobalOffset =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
    __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    __ bind(&push_receiver);
    __ push(rbx);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ movq(rax, Operand(rbp, kIndexOffset));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(rdx, Operand(rbp, kArgumentsOffset));  // load arguments

    // Use inline caching to speed up access to arguments.
    Handle<Code> ic =
        masm->isolate()->builtins()->KeyedLoadIC_Initialize();
    __ Call(ic, RelocInfo::CODE_TARGET);
    // It is important that we do not have a test instruction after the
    // call.  A test instruction after the call is used to indicate that
    // we have generated an inline version of the keyed load.  In this
    // case, we know that we are not generating a test instruction next.

    // Push the nth argument.
    __ push(rax);

    // Update the index on the stack and in register rax.
    __ movq(rax, Operand(rbp, kIndexOffset));
    __ SmiAddConstant(rax, rax, Smi::FromInt(1));
    __ movq(Operand(rbp, kIndexOffset), rax);

    __ bind(&entry);
    __ cmpq(rax, Operand(rbp, kLimitOffset));
    __ j(not_equal, &loop);
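
    // Per-iteration effect of the loop above (a sketch): with the arguments
    // object in rdx and the current index in rax, as the KeyedLoadIC expects,
    // each round performs roughly `push(arguments[index]); index++;` until
    // the index reaches the saved limit.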

    // Invoke the function.
    Label call_proxy;
    ParameterCount actual(rax);
    __ SmiToInteger32(rax, rax);
    __ movq(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &call_proxy);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);

    frame_scope.GenerateLeaveFrame();
    __ ret(3 * kPointerSize);  // remove this, receiver, and arguments

    // Invoke the function proxy.
    __ bind(&call_proxy);
    __ push(rdi);  // add function proxy as last argument
    __ incq(rax);
    __ Set(rbx, 0);
    __ SetCallKind(rcx, CALL_AS_METHOD);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ ret(3 * kPointerSize);  // remove this, receiver, and arguments
}


// Allocate an empty JSArray. The allocated array is put into the result
// register. If the parameter initial_capacity is larger than zero an elements
// backing store is allocated with this size and filled with the hole values.
// Otherwise the elements backing store is set to the empty FixedArray.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 Label* gc_required) {
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  STATIC_ASSERT(initial_capacity >= 0);

  __ LoadInitialArrayMap(array_function, scratch2, scratch1);

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  Factory* factory = masm->isolate()->factory();
  __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
  __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
          factory->empty_fixed_array());
  // Field JSArray::kElementsOffset is initialized later.
  __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));

  // If no storage is requested for the elements array just set the empty
  // fixed array.
  if (initial_capacity == 0) {
    __ Move(FieldOperand(result, JSArray::kElementsOffset),
            factory->empty_fixed_array());
    return;
  }

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ lea(scratch1, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), scratch1);

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array
  // scratch2: start of next object
  __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
          factory->fixed_array_map());
  __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
          Smi::FromInt(initial_capacity));

  // Fill the FixedArray with the hole value. Inline the code if short.
  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
  static const int kLoopUnfoldLimit = 4;
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  if (initial_capacity <= kLoopUnfoldLimit) {
    // Use a scratch register here to have only one reloc info when unfolding
    // the loop.
    for (int i = 0; i < initial_capacity; i++) {
      __ movq(FieldOperand(scratch1,
                           FixedArray::kHeaderSize + i * kPointerSize),
              scratch3);
    }
  } else {
    Label loop, entry;
    __ movq(scratch2, Immediate(initial_capacity));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(FieldOperand(scratch1,
                         scratch2,
                         times_pointer_size,
                         FixedArray::kHeaderSize),
            scratch3);
    __ bind(&entry);
    __ decq(scratch2);
    __ j(not_sign, &loop);
  }
}
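
// Design note (illustrative): with kLoopUnfoldLimit == 4, a small
// initial_capacity is filled by straight-line stores, in effect
//   elements[0 .. initial_capacity - 1] = the_hole;
// while larger capacities pay for a count-down loop instead, trading code
// size against loop overhead.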


// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register and beginning and end of the FixedArray elements
// storage is put into registers elements_array and elements_array_end (see
// below for when that is not the case). If the parameter fill_with_hole is
// true, the allocated elements backing store is filled with the hole values;
// otherwise it is left uninitialized. When the backing store is filled, the
// register elements_array is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi, cannot be 0.
                            Register result,
                            Register elements_array,
                            Register elements_array_end,
                            Register scratch,
                            bool fill_with_hole,
                            Label* gc_required) {
  __ LoadInitialArrayMap(array_function, scratch, elements_array);

  if (FLAG_debug_code) {  // Assert that array size is not zero.
    __ testq(array_size, array_size);
    __ Assert(not_zero, "array size is unexpectedly 0");
  }

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested elements.
  SmiIndex index =
      masm->SmiToIndex(kScratchRegister, array_size, kPointerSizeLog2);
  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
                        index.scale,
                        index.reg,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array: initial map
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  Factory* factory = masm->isolate()->factory();
  __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
  __ Move(elements_array, factory->empty_fixed_array());
  __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
  // Field JSArray::kElementsOffset is initialized later.
  __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ lea(elements_array, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), elements_array);

  // Initialize the fixed array. FixedArray length is stored as a smi.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
          factory->fixed_array_map());
  // For non-empty JSArrays the length of the FixedArray and the JSArray is the
  // same.
  __ movq(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  if (fill_with_hole) {
    Label loop, entry;
    __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
    __ lea(elements_array, Operand(elements_array,
                                   FixedArray::kHeaderSize - kHeapObjectTag));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(elements_array, 0), scratch);
    __ addq(elements_array, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(elements_array, elements_array_end);
    __ j(below, &loop);
  }
}
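
// Note on the allocation above (a sketch of the size computation): the
// single AllocateInNewSpace call reserves
//   JSArray::kSize + FixedArray::kHeaderSize + 8 * length
// bytes, so the JSArray header and its elements FixedArray are always
// contiguous; that is why one lea suffices to locate the elements array.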


// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code, the runtime is called.
// This function assumes the following state:
//   rdi: constructor (built-in Array function)
//   rax: argc
//   rsp[0]: return address
//   rsp[8]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in rdi needs to be preserved for
// entering the generic code. In both cases argc in rax needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Label argc_one_or_more, argc_two_or_more, empty_array, not_empty_array,
      has_non_smi_element, finish, cant_transition_map, not_double;

  // Check for array construction with zero arguments.
  __ testq(rax, rax);
  __ j(not_zero, &argc_one_or_more);

  __ bind(&empty_array);
  // Handle construction of an empty array.
  AllocateEmptyJSArray(masm,
                       rdi,
                       rbx,
                       rcx,
                       rdx,
                       r8,
                       call_generic_code);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->array_function_native(), 1);
  __ movq(rax, rbx);
  __ ret(kPointerSize);

  // Check for one argument. Bail out if the argument is not a smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmpq(rax, Immediate(1));
  __ j(not_equal, &argc_two_or_more);
  __ movq(rdx, Operand(rsp, kPointerSize));  // Get the argument from the stack.

  __ SmiTest(rdx);
  __ j(not_zero, &not_empty_array);
  __ pop(r8);  // Adjust stack.
  __ Drop(1);
  __ push(r8);
  __ movq(rax, Immediate(0));  // Treat this as a call with argc of zero.
  __ jmp(&empty_array);

  __ bind(&not_empty_array);
  __ JumpUnlessNonNegativeSmi(rdx, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if the
  // size is too large to actually allocate an elements array.
  __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
  __ j(greater_equal, call_generic_code);

  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0]: return address
  // rsp[8]: argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  true,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1);
  __ movq(rax, rbx);
  __ ret(2 * kPointerSize);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ movq(rdx, rax);
  __ Integer32ToSmi(rdx, rdx);  // Convert argc to a smi.
  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0] : return address
  // rsp[8] : last argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  false,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1);

  // rax: argc
  // rbx: JSArray
  // rcx: elements_array
  // r8: elements_array_end (untagged)
  // rsp[0]: return address
  // rsp[8]: last argument

  // Location of the last argument
  __ lea(r9, Operand(rsp, kPointerSize));

  // Location of the first array element (the fill_with_hole parameter to
  // AllocateJSArray was false, so the FixedArray is returned in rcx).
  __ lea(rdx, Operand(rcx, FixedArray::kHeaderSize - kHeapObjectTag));

  // rax: argc
  // rbx: JSArray
  // rdx: location of the first array element
  // r9: location of the last argument
  // rsp[0]: return address
  // rsp[8]: last argument
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(r8, Operand(r9, rcx, times_pointer_size, 0));
  if (FLAG_smi_only_arrays) {
    __ JumpIfNotSmi(r8, &has_non_smi_element);
  }
  __ movq(Operand(rdx, 0), r8);
  __ addq(rdx, Immediate(kPointerSize));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Remove caller arguments from the stack and return.
  // rax: argc
  // rbx: JSArray
   1286   // esp[0]: return address
   1287   // esp[8]: last argument
   1288   __ bind(&finish);
   1289   __ pop(rcx);
   1290   __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
   1291   __ push(rcx);
   1292   __ movq(rax, rbx);
   1293   __ ret(0);
   1294 
   1295   __ bind(&has_non_smi_element);
   1296   // Double values are handled by the runtime.
   1297   __ CheckMap(r8,
   1298               masm->isolate()->factory()->heap_number_map(),
   1299               &not_double,
   1300               DONT_DO_SMI_CHECK);
   1301   __ bind(&cant_transition_map);
   1302   __ UndoAllocationInNewSpace(rbx);
   1303   __ jmp(call_generic_code);
   1304 
   1305   __ bind(&not_double);
   1306   // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
   1307   // rbx: JSArray
   1308   __ movq(r11, FieldOperand(rbx, HeapObject::kMapOffset));
   1309   __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
   1310                                          FAST_ELEMENTS,
   1311                                          r11,
   1312                                          kScratchRegister,
   1313                                          &cant_transition_map);
   1314 
   1315   __ movq(FieldOperand(rbx, HeapObject::kMapOffset), r11);
   1316   __ RecordWriteField(rbx, HeapObject::kMapOffset, r11, r8,
   1317                       kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   1318 
   1319   // Finish the array initialization loop.
   1320   Label loop2;
   1321   __ bind(&loop2);
   1322   __ movq(r8, Operand(r9, rcx, times_pointer_size, 0));
   1323   __ movq(Operand(rdx, 0), r8);
   1324   __ addq(rdx, Immediate(kPointerSize));
   1325   __ decq(rcx);
   1326   __ j(greater_equal, &loop2);
   1327   __ jmp(&finish);
   1328 }
   1329 
   1330 
   1331 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // The smi check below catches both a NULL pointer and a smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for InternalArray function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for InternalArray function");
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code in case the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Handle<Code> array_code =
      masm->isolate()->builtins()->InternalArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // The smi check below catches both a NULL pointer and a smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code in case the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Handle<Code> array_code =
      masm->isolate()->builtins()->ArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argc
  //  -- rdi : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin and internal
    // Array functions, which always have maps.
    // The initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // The smi check below catches both a NULL pointer and a smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Handle<Code> generic_construct_stub =
      masm->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1);

  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
    __ cmpq(rdi, rcx);
    __ Assert(equal, "Unexpected String function");
  }

  // Load the first argument into rax and get rid of the rest
  // (including the receiver).
  Label no_arguments;
  __ testq(rax, rax);
  __ j(zero, &no_arguments);
  __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
  __ pop(rcx);
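  // Drop all arguments and the receiver; only the first argument (saved in
  // rbx above) is needed.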
  __ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
  __ push(rcx);
  __ movq(rax, rbx);

  // Look up the argument in the number-to-string cache.
  Label not_cached, argument_is_string;
  NumberToStringStub::GenerateLookupNumberStringCache(
      masm,
      rax,  // Input.
      rbx,  // Result.
      rcx,  // Scratch 1.
      rdx,  // Scratch 2.
      false,  // Is the input known to be a smi?
      &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- rbx    : argument converted to string
  //  -- rdi    : constructor function
  //  -- rsp[0] : return address
  // -----------------------------------

  // Allocate a JSValue and put the tagged pointer into rax.
  Label gc_required;
  __ AllocateInNewSpace(JSValue::kSize,
                        rax,  // Result.
                        rcx,  // New allocation top (we ignore it).
                        no_reg,
                        &gc_required,
                        TAG_OBJECT);

  // Set the map.
  __ LoadGlobalFunctionInitialMap(rdi, rcx);
  if (FLAG_debug_code) {
    __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
            Immediate(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(equal, "Unexpected string wrapper instance size");
    __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
    __ Assert(equal, "Unexpected unused properties of string wrapper");
  }
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), rcx);

  // Set the properties and elements.
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);

  // Set the value.
  __ movq(FieldOperand(rax, JSValue::kValueOffset), rbx);

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
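  // (The four pointer-sized fields -- map, properties, elements, and value --
  // have all been written above.)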

  // We're done. Return.
  __ ret(0);

  // The argument was not found in the number-to-string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(rax, &convert_argument);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
  __ j(NegateCondition(is_string), &convert_argument);
  __ movq(rbx, rax);
  __ IncrementCounter(counters->string_ctor_string_value(), 1);
  __ jmp(&argument_is_string);

  // Invoke the conversion builtin and put the result into rbx.
  __ bind(&convert_argument);
  __ IncrementCounter(counters->string_ctor_conversions(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(rdi);  // Preserve the function.
    __ push(rax);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
    __ pop(rdi);
  }
  __ movq(rbx, rax);
  __ jmp(&argument_is_string);

  // Load the empty string into rbx, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(rbx, Heap::kEmptyStringRootIndex);
  __ pop(rcx);
  __ lea(rsp, Operand(rsp, kPointerSize));
  __ push(rcx);
  __ jmp(&argument_is_string);

  // At this point the argument is already a string. Call the runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(rbx);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ ret(0);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(rbp);
  __ movq(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ push(r8);
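  // The adaptor frame now holds, below the saved rbp: the sentinel (in the
  // context slot), the function, and the argument count (as a smi).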
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. The number is a smi.
  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movq(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
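  // The extra kPointerSize drops the receiver along with the arguments.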
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rcx : call kind information
  //  -- rdx : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmpq(rax, rbx);
  __ j(less, &too_few);
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy the receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // Account for the receiver.

    Label copy;
    __ bind(&copy);
    __ incq(r8);
    __ push(Operand(rax, 0));
    __ subq(rax, Immediate(kPointerSize));
    __ cmpq(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy the receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // Account for the receiver.

    Label copy;
    __ bind(&copy);
    __ incq(r8);
    __ push(Operand(rdi, 0));
    __ subq(rdi, Immediate(kPointerSize));
    __ cmpq(r8, rax);
    __ j(less, &copy);

    // Fill the remaining expected arguments with undefined values.
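    // (e.g. a function declared with two parameters but called with one gets
    // a single undefined pushed here for the missing argument.)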
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incq(r8);
    __ push(kScratchRegister);
    __ cmpq(r8, rbx);
    __ j(less, &fill);

    // Restore the function pointer.
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Store the offset of the return address for the deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave the frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Get the loop depth of the stack guard check. This is recorded in
  // a test(rax, depth) instruction right after the call.
  Label stack_check;
  __ movq(rbx, Operand(rsp, 0));  // Return address.
  __ movzxbq(rbx, Operand(rbx, 1));  // Depth.
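  // (The byte at return address + 1 is presumably the 8-bit immediate of
  // that test instruction.)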

  // Get the loop nesting level at which we allow OSR from the
  // unoptimized code and check if we want to do OSR yet. If not, we
  // should perform a stack guard check so we can get interrupts while
  // waiting for on-stack replacement.
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
  __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset));
  __ j(greater, &stack_check);

  // Pass the function to optimize as the argument to the on-stack
  // replacement runtime function.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the result was -1, it means that we couldn't optimize the
  // function. Just return and continue in the unoptimized version.
  Label skip;
  __ SmiCompare(rax, Smi::FromInt(-1));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  // If we decide not to perform on-stack replacement, we perform a
  // stack guard check to enable interrupts.
  __ bind(&stack_check);
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok, Label::kNear);

  StackCheckStub stub;
  __ TailCallStub(&stub);
  if (FLAG_debug_code) {
    __ Abort("Unreachable code: returned from tail call.");
  }
  __ bind(&ok);
  __ ret(0);

  __ bind(&skip);
  // Untag the AST id and push it on the stack.
  __ SmiToInteger32(rax, rax);
  __ push(rax);

  // Generate the code for doing the frame-to-frame translation using
  // the deoptimizer infrastructure.
  Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
  generator.Generate();
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64