// (removed code-browser navigation banner: "Home | History | Annotate | Download | only in x64")
      1 // Copyright 2011 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 
     28 #include "v8.h"
     29 
     30 #if defined(V8_TARGET_ARCH_X64)
     31 
     32 #include "codegen.h"
     33 #include "deoptimizer.h"
     34 #include "full-codegen.h"
     35 
     36 namespace v8 {
     37 namespace internal {
     38 
     39 
     40 #define __ ACCESS_MASM(masm)
     41 
     42 
// Emits the adaptor stub that bridges a JS-level builtin call into a C++
// builtin function identified by |id|: optionally inserts the called function
// as an extra argument, fixes up the argument count in rax, and tail-calls
// into C++ via JumpToExternalReference.
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                : number of arguments excluding receiver
  //  -- rdi                : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- rsi                : context
  //  -- rsp[0]             : return address
  //  -- rsp[8]             : last argument
  //  -- ...
  //  -- rsp[8 * argc]      : first argument (argc == rax)
  //  -- rsp[8 * (argc +1)] : receiver
  // -----------------------------------

  // Insert extra arguments.  The extra argument (the called function) is
  // pushed below the return address so it appears as the last JS argument.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ pop(kScratchRegister);  // Save return address.
    __ push(rdi);
    __ push(kScratchRegister);  // Restore return address.
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addq(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}
     74 
     75 
// Emits the generic dispatch stub for 'new' expressions: if the callee is a
// JSFunction, jumps to its function-specific construct stub; otherwise falls
// through to CALL_NON_FUNCTION_AS_CONSTRUCTOR via the arguments adaptor
// trampoline (which will typically end up throwing a TypeError).
void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  // -----------------------------------

  Label non_function_call;
  // Check that function is not a smi.
  __ JumpIfSmi(rdi, &non_function_call);
  // Check that function is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &non_function_call);

  // Jump to the function-specific construct stub.
  __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
  // Skip the Code object header to get the stub's entry address.
  __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
  __ jmp(rbx);

  // rdi: called object
  // rax: number of arguments
  __ bind(&non_function_call);
  // Set expected number of arguments to zero (not changing rax).
  __ Set(rbx, 0);
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
}
    104 
    105 
// Shared body of the three JSConstructStub builtins.  Enters a construct
// frame, tries to allocate the receiver object inline in new space (bailing
// out to the Runtime::kNewObject runtime call when any precondition fails),
// copies the arguments onto the expression stack, invokes the constructor,
// and then applies the ECMA-262 section 13.2.2 rule for choosing between the
// constructor's return value and the allocated receiver.
//
// is_api_function:     invoke through the HandleApiCallConstruct builtin
//                      instead of a normal function invocation.
// count_constructions: decrement the construction countdown stored on the
//                      SharedFunctionInfo (used to track in-object slack) and
//                      pre-fill the extra in-object fields with a filler so
//                      the object can later be truncated.
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Enter a construct frame.
  __ EnterConstructFrame();

  // Store a smi-tagged arguments count on the stack.
  __ Integer32ToSmi(rax, rax);
  __ push(rax);

  // Push the function to invoke on the stack.
  __ push(rdi);

  // Try to allocate the object without transitioning into C code. If any of the
  // preconditions is not met, the code bails out to the runtime call.
  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;

#ifdef ENABLE_DEBUGGER_SUPPORT
    // When the debugger is stepping into constructors, always take the
    // runtime path so the debugger can intercept the call.
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ movq(kScratchRegister, debug_step_in_fp);
    __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
    __ j(not_equal, &rt_call);
#endif

    // Verified that the constructor is a JSFunction.
    // Load the initial map and verify that it is in fact a map.
    // rdi: constructor
    __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi
    ASSERT(kSmiTag == 0);
    __ JumpIfSmi(rax, &rt_call);
    // rdi: constructor
    // rax: initial map (if proven valid below)
    __ CmpObjectType(rax, MAP_TYPE, rbx);
    __ j(not_equal, &rt_call);

    // Check that the constructor is not constructing a JSFunction (see comments
    // in Runtime_NewObject in runtime.cc). In which case the initial map's
    // instance type would be JS_FUNCTION_TYPE.
    // rdi: constructor
    // rax: initial map
    __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
    __ j(equal, &rt_call);

    if (count_constructions) {
      Label allocate;
      // Decrease generous allocation count.
      __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      __ decb(FieldOperand(rcx, SharedFunctionInfo::kConstructionCountOffset));
      __ j(not_zero, &allocate);

      // Countdown reached zero: save the map (rax) and constructor (rdi)
      // across the runtime call that finalizes the instance size.
      __ push(rax);
      __ push(rdi);

      __ push(rdi);  // constructor
      // The call will replace the stub, so the countdown is only done once.
      __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

      __ pop(rdi);
      __ pop(rax);

      __ bind(&allocate);
    }

    // Now allocate the JSObject on the heap.
    __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
    __ shl(rdi, Immediate(kPointerSizeLog2));
    // rdi: size of new object
    __ AllocateInNewSpace(rdi,
                          rbx,
                          rdi,
                          no_reg,
                          &rt_call,
                          NO_ALLOCATION_FLAGS);
    // Allocated the JSObject, now initialize the fields.
    // rax: initial map
    // rbx: JSObject (not HeapObject tagged - the actual address).
    // rdi: start of next object
    __ movq(Operand(rbx, JSObject::kMapOffset), rax);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
    __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
    // Set extra fields in the newly allocated object.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    { Label loop, entry;
      // To allow for truncation: when counting constructions the fields are
      // filled with the one-pointer filler map instead of undefined, so the
      // unused tail of the object can later be shrunk away.
      if (count_constructions) {
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
      } else {
        __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      }
      __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ jmp(&entry);
      __ bind(&loop);
      __ movq(Operand(rcx, 0), rdx);
      __ addq(rcx, Immediate(kPointerSize));
      __ bind(&entry);
      __ cmpq(rcx, rdi);
      __ j(less, &loop);
    }

    // Add the object tag to make the JSObject real, so that we can continue and
    // jump into the continuation code at any time from now on. Any failures
    // need to undo the allocation, so that the heap is in a consistent state
    // and verifiable.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    __ or_(rbx, Immediate(kHeapObjectTag));

    // Check if a non-empty properties array is needed.
    // Allocate and initialize a FixedArray if it is.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    // Calculate total properties described map.
    __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
    __ movzxbq(rcx, FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
    __ addq(rdx, rcx);
    // Calculate unused properties past the end of the in-object properties.
    __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
    __ subq(rdx, rcx);
    // Done if no extra properties are to be allocated.
    __ j(zero, &allocated);
    __ Assert(positive, "Property allocation count failed.");

    // Scale the number of elements by pointer size and add the header for
    // FixedArrays to the start of the next object calculation from above.
    // rbx: JSObject
    // rdi: start of next object (will be start of FixedArray)
    // rdx: number of elements in properties array
    __ AllocateInNewSpace(FixedArray::kHeaderSize,
                          times_pointer_size,
                          rdx,
                          rdi,
                          rax,
                          no_reg,
                          &undo_allocation,
                          RESULT_CONTAINS_TOP);

    // Initialize the FixedArray.
    // rbx: JSObject
    // rdi: FixedArray
    // rdx: number of elements
    // rax: start of next object
    __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
    __ movq(Operand(rdi, HeapObject::kMapOffset), rcx);  // setup the map
    __ Integer32ToSmi(rdx, rdx);
    __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx);  // and length

    // Initialize the fields to undefined.
    // rbx: JSObject
    // rdi: FixedArray
    // rax: start of next object
    // rdx: number of elements
    { Label loop, entry;
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
      __ jmp(&entry);
      __ bind(&loop);
      __ movq(Operand(rcx, 0), rdx);
      __ addq(rcx, Immediate(kPointerSize));
      __ bind(&entry);
      __ cmpq(rcx, rax);
      __ j(below, &loop);
    }

    // Store the initialized FixedArray into the properties field of
    // the JSObject
    // rbx: JSObject
    // rdi: FixedArray
    __ or_(rdi, Immediate(kHeapObjectTag));  // add the heap tag
    __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);


    // Continue with JSObject being successfully allocated
    // rbx: JSObject
    __ jmp(&allocated);

    // Undo the setting of the new top so that the heap is verifiable. For
    // example, the map's unused properties potentially do not match the
    // allocated objects unused properties.
    // rbx: JSObject (previous new top)
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(rbx);
  }

  // Allocate the new receiver object using the runtime call.
  // rdi: function (constructor)
  __ bind(&rt_call);
  // Must restore rdi (constructor) before calling runtime.
  __ movq(rdi, Operand(rsp, 0));
  __ push(rdi);
  __ CallRuntime(Runtime::kNewObject, 1);
  __ movq(rbx, rax);  // store result in rbx

  // New object allocated.
  // rbx: newly allocated object
  __ bind(&allocated);
  // Retrieve the function from the stack.
  __ pop(rdi);

  // Retrieve smi-tagged arguments count from the stack.
  __ movq(rax, Operand(rsp, 0));
  __ SmiToInteger32(rax, rax);

  // Push the allocated receiver to the stack. We need two copies
  // because we may have to return the original one and the calling
  // conventions dictate that the called function pops the receiver.
  __ push(rbx);
  __ push(rbx);

  // Setup pointer to last argument.
  __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

  // Copy arguments and receiver to the expression stack.
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ push(Operand(rbx, rcx, times_pointer_size, 0));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Call the function.
  if (is_api_function) {
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
    Handle<Code> code =
        masm->isolate()->builtins()->HandleApiCallConstruct();
    ParameterCount expected(0);
    __ InvokeCode(code, expected, expected,
                  RelocInfo::CODE_TARGET, CALL_FUNCTION);
  } else {
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION);
  }

  // Restore context from the frame.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

  // If the result is an object (in the ECMA sense), we should get rid
  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
  // on page 74.
  Label use_receiver, exit;
  // If the result is a smi, it is *not* an object in the ECMA sense.
  __ JumpIfSmi(rax, &use_receiver);

  // If the type of the result (stored in its map) is less than
  // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(above_equal, &exit);

  // Throw away the result of the constructor invocation and use the
  // on-stack receiver as the result.
  __ bind(&use_receiver);
  __ movq(rax, Operand(rsp, 0));

  // Restore the arguments count and leave the construct frame.
  __ bind(&exit);
  __ movq(rbx, Operand(rsp, kPointerSize));  // get arguments count
  __ LeaveConstructFrame();

  // Remove caller arguments from the stack and return.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  // Drop the arguments plus the receiver (hence the extra kPointerSize).
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ ret(0);
}
    386 
    387 
// Construct stub used while the construction countdown is active: fields are
// pre-filled so in-object slack can be reclaimed later.
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}
    391 
    392 
// Generic construct stub for ordinary (non-API) JS functions.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}
    396 
    397 
// Construct stub for API functions: invokes through HandleApiCallConstruct.
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}
    401 
    402 
// Shared body of the JSEntryTrampoline builtins: translates the C++ calling
// convention (Win64 or System V, selected at compile time) into the JS
// calling convention, copies the argument array onto the stack (dereferencing
// each handle), and then either calls the function or the JSConstructCall
// builtin depending on |is_construct|.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Platform specific argument handling. After this, the stack contains
  // an internal frame and the pushed function and receiver, and
  // register rax and rbx holds the argument count and argument array,
  // while rdi holds the function pointer and rsi the context.
#ifdef _WIN64
  // MSVC parameters in:
  // rcx : entry (ignored)
  // rdx : function
  // r8 : receiver
  // r9 : argc
  // [rsp+0x20] : argv

  // Clear the context before we push it when entering the JS frame.
  __ Set(rsi, 0);
  __ EnterInternalFrame();

  // Load the function context into rsi.
  __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

  // Push the function and the receiver onto the stack.
  __ push(rdx);
  __ push(r8);

  // Load the number of arguments and setup pointer to the arguments.
  __ movq(rax, r9);
  // Load the previous frame pointer to access C argument on stack
  __ movq(kScratchRegister, Operand(rbp, 0));
  __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
  // Load the function pointer into rdi.
  __ movq(rdi, rdx);
#else  // _WIN64
  // GCC parameters in:
  // rdi : entry (ignored)
  // rsi : function
  // rdx : receiver
  // rcx : argc
  // r8  : argv

  __ movq(rdi, rsi);
  // rdi : function

  // Clear the context before we push it when entering the JS frame.
  __ Set(rsi, 0);
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push the function and receiver and setup the context.
  __ push(rdi);
  __ push(rdx);
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Load the number of arguments and setup pointer to the arguments.
  __ movq(rax, rcx);
  __ movq(rbx, r8);
#endif  // _WIN64

  // Current stack contents:
  // [rsp + 2 * kPointerSize ... ]: Internal frame
  // [rsp + kPointerSize]         : function
  // [rsp]                        : receiver
  // Current register contents:
  // rax : argc
  // rbx : argv
  // rsi : context
  // rdi : function

  // Copy arguments to the stack in a loop.
  // Register rbx points to array of pointers to handle locations.
  // Push the values of these handles.
  Label loop, entry;
  __ Set(rcx, 0);  // Set loop variable to 0.
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
  __ push(Operand(kScratchRegister, 0));  // dereference handle
  __ addq(rcx, Immediate(1));
  __ bind(&entry);
  __ cmpq(rcx, rax);
  __ j(not_equal, &loop);

  // Invoke the code.
  if (is_construct) {
    // Expects rdi to hold function pointer.
    __ Call(masm->isolate()->builtins()->JSConstructCall(),
            RelocInfo::CODE_TARGET);
  } else {
    ParameterCount actual(rax);
    // Function must be in rdi.
    __ InvokeFunction(rdi, actual, CALL_FUNCTION);
  }

  // Exit the JS frame. Notice that this also removes the empty
  // context and the function left on the stack by the code
  // invocation.
  __ LeaveInternalFrame();
  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // remove receiver
}
    511 
    512 
// Entry trampoline for ordinary (non-construct) calls from C++ into JS.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
    516 
    517 
// Entry trampoline for 'new' calls from C++ into JS.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
    521 
    522 
// Stub installed as the code of not-yet-compiled functions: compiles the
// function (rdi) via Runtime::kLazyCompile and tail-calls the resulting code.
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push a copy of the function onto the stack.
  __ push(rdi);

  __ push(rdi);  // Function is also the parameter to the runtime call.
  __ CallRuntime(Runtime::kLazyCompile, 1);
  // The runtime call returns the freshly compiled Code object in rax.
  __ pop(rdi);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rcx);
}
    541 
    542 
// Stub that triggers optimizing recompilation of the function (rdi) via
// Runtime::kLazyRecompile and tail-calls the resulting code.
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push a copy of the function onto the stack.
  __ push(rdi);

  __ push(rdi);  // Function is also the parameter to the runtime call.
  __ CallRuntime(Runtime::kLazyRecompile, 1);
  // The runtime call returns the recompiled Code object in rax.

  // Restore function and tear down temporary frame.
  __ pop(rdi);
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rcx);
}
    561 
    562 
// Shared body of the NotifyDeoptimized builtins: informs the runtime that a
// deoptimization (of the given bailout |type|) has happened, then resumes in
// unoptimized code by dispatching on the full-codegen state the deoptimizer
// left on the stack (NO_REGISTERS, or TOS_REG with the value destined for
// rax).
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Pass the deoptimization type to the runtime system.
  __ Push(Smi::FromInt(static_cast<int>(type)));

  __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));

  // Switch on the state.
  NearLabel not_no_registers, not_tos_rax;
  __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  // State is TOS_REG: the value on the stack belongs in rax.
  __ movq(rax, Operand(rsp, 2 * kPointerSize));
  __ cmpq(rcx, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort("no cases left");
}
    593 
// Notification stub for eager deoptimization.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
    597 
    598 
// Notification stub for lazy deoptimization.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
    602 
    603 
// Notifies the runtime of an on-stack-replacement event, preserving all
// registers around the call (Pushad/Popad) instead of using a safepoint.
void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  // For now, we are relying on the fact that Runtime::NotifyOSR
  // doesn't do any garbage collection which allows us to save/restore
  // the registers without worrying about which of them contain
  // pointers. This seems a bit fragile.
  __ Pushad();
  __ EnterInternalFrame();
  __ CallRuntime(Runtime::kNotifyOSR, 0);
  __ LeaveInternalFrame();
  __ Popad();
  __ ret(0);
}
    616 
    617 
// Emits the Function.prototype.call builtin: ensures at least one argument,
// extracts the actual callee (passed as the receiver), patches the new
// receiver according to non-strict-mode rules (wrapping primitives, replacing
// null/undefined with the global receiver), shifts the arguments down one
// slot, and dispatches — through the arguments adaptor when the actual and
// expected argument counts differ, or to CALL_NON_FUNCTION when the callee
// is not a JSFunction.
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  // rsp[0]:   Return address
  // rsp[1]:   Argument n
  // rsp[2]:   Argument n-1
  //  ...
  // rsp[n]:   Argument 1
  // rsp[n+1]: Receiver (function to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  { Label done;
    __ testq(rax, rax);
    __ j(not_zero, &done);
    // Zero arguments: push undefined as the (sole) argument, keeping the
    // return address on top.
    __ pop(rbx);
    __ Push(FACTORY->undefined_value());
    __ push(rbx);
    __ incq(rax);
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label non_function;
  // The function to call is at position n+1 on the stack.
  __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ JumpIfSmi(rdi, &non_function);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &non_function);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &shift_arguments);

    // Compute the receiver in non-strict mode.
    __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
    __ JumpIfSmi(rbx, &convert_to_object);

    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    // Receivers that are already JS objects are used unchanged.
    __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
    __ j(below, &convert_to_object);
    __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
    __ j(below_equal, &shift_arguments);

    __ bind(&convert_to_object);
    __ EnterInternalFrame();  // In order to preserve argument count.
    // Save the (smi-tagged) argument count across the ToObject call.
    __ Integer32ToSmi(rax, rax);
    __ push(rax);

    __ push(rbx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ movq(rbx, rax);

    __ pop(rax);
    __ SmiToInteger32(rax, rax);
    __ LeaveInternalFrame();
    // Restore the function to rdi.
    __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
    __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);

    __ jmp(&shift_arguments);
  }


  // 3b. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ bind(&non_function);
  __ movq(Operand(rsp, rax, times_pointer_size, 0), rdi);
  // rdi == 0 marks the non-function case for step 5a below.
  __ Set(rdi, 0);

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  __ bind(&shift_arguments);
  { Label loop;
    __ movq(rcx, rax);
    __ bind(&loop);
    __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
    __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
    __ decq(rcx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(rbx);  // Discard copy of return address.
    __ decq(rax);  // One fewer argument (first argument is new receiver).
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin.
  { Label function;
    __ testq(rdi, rdi);
    __ j(not_zero, &function);
    __ Set(rbx, 0);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register edx without checking arguments.
  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movsxlq(rbx,
             FieldOperand(rdx,
                          SharedFunctionInfo::kFormalParameterCountOffset));
  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpq(rax, rbx);
  __ j(not_equal,
       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
       RelocInfo::CODE_TARGET);

  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION);
}
    758 
    759 
// Implements Function.prototype.apply: unrolls the arguments object onto the
// stack (element-by-element via the keyed-load IC) and invokes the target
// function with those arguments.
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Stack at entry:
  //    rsp: return address
  //  rsp+8: arguments
  // rsp+16: receiver ("this")
  // rsp+24: function
  __ EnterInternalFrame();
  // Stack frame:
  //    rbp: Old base pointer
  // rbp[1]: return address
  // rbp[2]: function arguments
  // rbp[3]: receiver
  // rbp[4]: function
  static const int kArgumentsOffset = 2 * kPointerSize;
  static const int kReceiverOffset = 3 * kPointerSize;
  static const int kFunctionOffset = 4 * kPointerSize;
  __ push(Operand(rbp, kFunctionOffset));
  __ push(Operand(rbp, kArgumentsOffset));
  // APPLY_PREPARE validates the arguments object; the code below relies on
  // its result in rax being the argument count as a smi.
  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movq(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subq(rcx, kScratchRegister);
  // Make rdx the space we need for the array when it is unrolled onto the
  // stack.
  __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
  // Check if the arguments will overflow the stack.
  __ cmpq(rcx, rdx);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ push(Operand(rbp, kFunctionOffset));
  __ push(rax);
  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
  __ bind(&okay);
  // End of stack check.

  // Push current index and limit, kept as stack slots in the internal frame
  // so they survive the IC calls in the copy loop below.
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
  __ push(rax);  // limit
  __ push(Immediate(0));  // index

  // Change context eagerly to get the right global object if
  // necessary.
  __ movq(rdi, Operand(rbp, kFunctionOffset));
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Compute the receiver.
  Label call_to_object, use_global_receiver, push_receiver;
  __ movq(rbx, Operand(rbp, kReceiverOffset));

  // Do not transform the receiver for strict mode functions.
  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
           Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
  __ j(not_equal, &push_receiver);

  // Compute the receiver in non-strict mode: null and undefined are replaced
  // by the global receiver, primitives are boxed via ToObject.
  __ JumpIfSmi(rbx, &call_to_object);
  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
  __ j(equal, &use_global_receiver);
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &use_global_receiver);

  // If given receiver is already a JavaScript object then there's no
  // reason for converting it.
  __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(below, &call_to_object);
  __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
  __ j(below_equal, &push_receiver);

  // Convert the receiver to an object.
  __ bind(&call_to_object);
  __ push(rbx);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ movq(rbx, rax);
  __ jmp(&push_receiver);

  // Use the current global receiver object as the receiver.
  __ bind(&use_global_receiver);
  const int kGlobalOffset =
      Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
  __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

  // Push the receiver.
  __ bind(&push_receiver);
  __ push(rbx);

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ movq(rax, Operand(rbp, kIndexOffset));
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(rdx, Operand(rbp, kArgumentsOffset));  // load arguments

  // Use inline caching to speed up access to arguments.
  // The keyed-load IC expects the receiver in rdx and the key in rax.
  Handle<Code> ic =
      masm->isolate()->builtins()->KeyedLoadIC_Initialize();
  __ Call(ic, RelocInfo::CODE_TARGET);
  // It is important that we do not have a test instruction after the
  // call.  A test instruction after the call is used to indicate that
  // we have generated an inline version of the keyed load.  In this
  // case, we know that we are not generating a test instruction next.

  // Push the nth argument.
  __ push(rax);

  // Update the index on the stack and in register rax.
  __ movq(rax, Operand(rbp, kIndexOffset));
  __ SmiAddConstant(rax, rax, Smi::FromInt(1));
  __ movq(Operand(rbp, kIndexOffset), rax);

  __ bind(&entry);
  __ cmpq(rax, Operand(rbp, kLimitOffset));
  __ j(not_equal, &loop);

  // Invoke the function.  rax still holds the argument count (as a smi
  // for ParameterCount, then untagged for the calling convention).
  ParameterCount actual(rax);
  __ SmiToInteger32(rax, rax);
  __ movq(rdi, Operand(rbp, kFunctionOffset));
  __ InvokeFunction(rdi, actual, CALL_FUNCTION);

  __ LeaveInternalFrame();
  __ ret(3 * kPointerSize);  // remove function, receiver, and arguments
}
    896 
    897 
// Number of (hole) elements to preallocate in the backing store when an
// empty array is constructed, so subsequent pushes avoid an immediate
// reallocation.
static const int kPreallocatedArrayElements = 4;
    900 
    901 
// Allocate an empty JSArray. The allocated array is put into the result
// register. If the parameter initial_capacity is larger than zero an elements
// backing store is allocated with this size and filled with the hole values.
// Otherwise the elements backing store is set to the empty FixedArray.
// On failure (new-space exhausted) control transfers to gc_required with
// no objects allocated.  scratch1-scratch3 are clobbered.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 int initial_capacity,
                                 Label* gc_required) {
  ASSERT(initial_capacity >= 0);

  // Load the initial map from the array function.
  __ movq(scratch1, FieldOperand(array_function,
                                 JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.  One contiguous allocation covers both objects.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
  __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
          FACTORY->empty_fixed_array());
  // Field JSArray::kElementsOffset is initialized later.
  __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));

  // If no storage is requested for the elements array just set the empty
  // fixed array.
  if (initial_capacity == 0) {
    __ Move(FieldOperand(result, JSArray::kElementsOffset),
            FACTORY->empty_fixed_array());
    return;
  }

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.  The FixedArray sits directly after the JSArray header in
  // the joint allocation made above.
  // result: JSObject
  // scratch2: start of next object
  __ lea(scratch1, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), scratch1);

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array
  // scratch2: start of next object
  __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
          FACTORY->fixed_array_map());
  __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
          Smi::FromInt(initial_capacity));

  // Fill the FixedArray with the hole value. Inline the code if short.
  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
  static const int kLoopUnfoldLimit = 4;
  ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
  __ Move(scratch3, FACTORY->the_hole_value());
  if (initial_capacity <= kLoopUnfoldLimit) {
    // Use a scratch register here to have only one reloc info when unfolding
    // the loop.
    for (int i = 0; i < initial_capacity; i++) {
      __ movq(FieldOperand(scratch1,
                           FixedArray::kHeaderSize + i * kPointerSize),
              scratch3);
    }
  } else {
    // Runtime fill loop: scratch1 walks from the first element slot up to the
    // end of the allocation in scratch2.
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(scratch1, 0), scratch3);
    __ addq(scratch1, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(scratch1, scratch2);
    __ j(below, &loop);
  }
}
    993 
    994 
// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register and beginning and end of the FixedArray elements
// storage is put into registers elements_array and elements_array_end  (see
// below for when that is not the case). If the parameter fill_with_hole is
// true the allocated elements backing store is filled with the hole values
// otherwise it is left uninitialized. When the backing store is filled the
// register elements_array is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi.
                            Register result,
                            Register elements_array,
                            Register elements_array_end,
                            Register scratch,
                            bool fill_with_hole,
                            Label* gc_required) {
  Label not_empty, allocated;

  // Load the initial map from the array function.
  __ movq(elements_array,
          FieldOperand(array_function,
                       JSFunction::kPrototypeOrInitialMapOffset));

  // Check whether an empty sized array is requested.
  __ testq(array_size, array_size);
  __ j(not_zero, &not_empty);

  // If an empty array is requested allocate a small elements array anyway. This
  // keeps the code below free of special casing for the empty array.
  int size = JSArray::kSize + FixedArray::SizeFor(kPreallocatedArrayElements);
  __ AllocateInNewSpace(size,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);
  __ jmp(&allocated);

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested elements.  The variable-size overload scales the smi element
  // count into the allocation size.
  __ bind(&not_empty);
  SmiIndex index =
      masm->SmiToIndex(kScratchRegister, array_size, kPointerSizeLog2);
  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
                        index.scale,
                        index.reg,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array: initial map
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ bind(&allocated);
  __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
  __ Move(elements_array, FACTORY->empty_fixed_array());
  __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
  // Field JSArray::kElementsOffset is initialized later.
  __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ lea(elements_array, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), elements_array);

  // Initialize the fixed array. FixedArray length is stored as a smi.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
          FACTORY->fixed_array_map());
  Label not_empty_2, fill_array;
  __ SmiTest(array_size);
  __ j(not_zero, &not_empty_2);
  // Length of the FixedArray is the number of pre-allocated elements even
  // though the actual JSArray has length 0.
  __ Move(FieldOperand(elements_array, FixedArray::kLengthOffset),
          Smi::FromInt(kPreallocatedArrayElements));
  __ jmp(&fill_array);
  __ bind(&not_empty_2);
  // For non-empty JSArrays the length of the FixedArray and the JSArray is the
  // same.
  __ movq(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  __ bind(&fill_array);
  if (fill_with_hole) {
    Label loop, entry;
    __ Move(scratch, FACTORY->the_hole_value());
    // Untag elements_array so it points at the first element slot.
    __ lea(elements_array, Operand(elements_array,
                                   FixedArray::kHeaderSize - kHeapObjectTag));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(elements_array, 0), scratch);
    __ addq(elements_array, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(elements_array, elements_array_end);
    __ j(below, &loop);
  }
}
   1109 
   1110 
// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called. This
// function assumes the following state:
//   rdi: constructor (built-in Array function)
//   rax: argc
//   rsp[0]: return address
//   rsp[8]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in rdi needs to be preserved for
// entering the generic code. In both cases argc in rax needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label *call_generic_code) {
  Label argc_one_or_more, argc_two_or_more;

  // Check for array construction with zero arguments.
  __ testq(rax, rax);
  __ j(not_zero, &argc_one_or_more);

  // Handle construction of an empty array: new Array().
  AllocateEmptyJSArray(masm,
                       rdi,
                       rbx,
                       rcx,
                       rdx,
                       r8,
                       kPreallocatedArrayElements,
                       call_generic_code);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->array_function_native(), 1);
  __ movq(rax, rbx);
  __ ret(kPointerSize);

  // Check for one argument. Bail out if argument is not smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmpq(rax, Immediate(1));
  __ j(not_equal, &argc_two_or_more);
  __ movq(rdx, Operand(rsp, kPointerSize));  // Get the argument from the stack.
  __ JumpUnlessNonNegativeSmi(rdx, call_generic_code);

  // Handle construction of an empty array of a certain size: new Array(n).
  // Bail out if size is too large to actually allocate an elements array.
  __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
  __ j(greater_equal, call_generic_code);

  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0]: return address
  // rsp[8]: argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  true,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1);
  __ movq(rax, rbx);
  __ ret(2 * kPointerSize);

  // Handle construction of an array from a list of arguments:
  // new Array(a, b, c, ...).
  __ bind(&argc_two_or_more);
  __ movq(rdx, rax);
  __ Integer32ToSmi(rdx, rdx);  // Convert argc to a smi.
  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0] : return address
  // rsp[8] : last argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  false,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1);

  // rax: argc
  // rbx: JSArray
  // rcx: elements_array
  // r8: elements_array_end (untagged)
  // rsp[0]: return address
  // rsp[8]: last argument

  // Location of the last argument
  __ lea(r9, Operand(rsp, kPointerSize));

  // Location of the first array element (Parameter fill_with_hole to
  // AllocateJSArray is false, so the FixedArray is returned in rcx).
  __ lea(rdx, Operand(rcx, FixedArray::kHeaderSize - kHeapObjectTag));

  // rax: argc
  // rbx: JSArray
  // rdx: location of the first array element
  // r9: location of the last argument
  // rsp[0]: return address
  // rsp[8]: last argument
  // Copy the arguments (pushed right-to-left on the stack) into the elements
  // store in source order.  rcx counts down from argc to 0 (inclusive).
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(r9, rcx, times_pointer_size, 0));
  __ movq(Operand(rdx, 0), kScratchRegister);
  __ addq(rdx, Immediate(kPointerSize));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Remove caller arguments from the stack and return.
  // rax: argc
  // rbx: JSArray
  // rsp[0]: return address
  // rsp[8]: last argument
  __ pop(rcx);
  __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ push(rcx);
  __ movq(rax, rbx);
  __ ret(0);
}
   1240 
   1241 
// Entry point for the Array function called as a normal function, e.g.
// Array(1, 2, 3).  Tries the fast native path first; falls back to the
// generic array code if the fast path bails out.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code in case the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Handle<Code> array_code =
      masm->isolate()->builtins()->ArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}
   1274 
   1275 
// Entry point for the Array function called as a constructor, e.g.
// new Array(1, 2, 3).  Mirrors Generate_ArrayCode but falls back to the
// generic construct stub; rdi (constructor) is preserved by the native path.
void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argc
  //  -- rdi : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin and internal
    // Array functions which always have a map.
    // Initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Handle<Code> generic_construct_stub =
      masm->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
   1308 
   1309 
// Construct stub for the String function called as a constructor.
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // TODO(849): implement custom construct stub.
  // Generate a copy of the generic stub for now.
  Generate_JSConstructStubGeneric(masm);
}
   1315 
   1316 
// Build an arguments-adaptor frame: saved rbp, sentinel context marker,
// function, and the (smi-tagged) actual argument count.  Counterpart of
// LeaveArgumentsAdaptorFrame below.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(rbp);
  __ movq(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ push(rdi);

  // Preserve the number of arguments on the stack. Must preserve both
  // rax and rbx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(rcx, rax);
  __ push(rcx);
}
   1333 
   1334 
// Tear down an arguments-adaptor frame and pop the caller's arguments
// (plus receiver), leaving the return address back on top of the stack.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movq(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack.
  // The extra kPointerSize accounts for the receiver slot.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
}
   1349 
   1350 
// Adapts the actual argument count to the callee's expected count before
// calling it: extra actual arguments are kept but ignored, missing ones are
// filled with undefined.  If the callee doesn't adapt arguments
// (kDontAdaptArgumentsSentinel) the call is a plain jump.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmpq(rax, rbx);
  __ j(less, &too_few);
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    // rax is repurposed as a pointer walking down the caller's arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(rcx, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rax, 0));
    __ subq(rax, Immediate(kPointerSize));
    __ cmpq(rcx, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    // rdi is temporarily used as the copy pointer; restored below.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(rcx, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rdi, 0));
    __ subq(rdi, Immediate(kPointerSize));
    __ cmpq(rcx, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incq(rcx);
    __ push(kScratchRegister);
    __ cmpq(rcx, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Dont adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
}
   1431 
   1432 
// Called from a loop's interrupt check in unoptimized code.  Decides whether
// to attempt on-stack replacement (OSR) with optimized code, perform a plain
// stack-guard check instead, or hand off to the deoptimizer infrastructure to
// translate the current frame into an optimized one.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Get the loop depth of the stack guard check. This is recorded in
  // a test(rax, depth) instruction right after the call.
  Label stack_check;
  __ movq(rbx, Operand(rsp, 0));  // return address
  __ movzxbq(rbx, Operand(rbx, 1));  // depth

  // Get the loop nesting level at which we allow OSR from the
  // unoptimized code and check if we want to do OSR yet. If not we
  // should perform a stack guard check so we can get interrupts while
  // waiting for on-stack replacement.
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
  __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset));
  __ j(greater, &stack_check);

  // Pass the function to optimize as the argument to the on-stack
  // replacement runtime function.
  __ EnterInternalFrame();
  __ push(rax);
  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  __ LeaveInternalFrame();

  // If the result was -1 it means that we couldn't optimize the
  // function. Just return and continue in the unoptimized version.
  NearLabel skip;
  __ SmiCompare(rax, Smi::FromInt(-1));
  __ j(not_equal, &skip);
  __ ret(0);

  // If we decide not to perform on-stack replacement we perform a
  // stack guard check to enable interrupts.
  __ bind(&stack_check);
  NearLabel ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  // The stack-check stub returns to our caller, so this point is only
  // reached if the tail call unexpectedly falls through.
  StackCheckStub stub;
  __ TailCallStub(&stub);
  __ Abort("Unreachable code: returned from tail call.");
  __ bind(&ok);
  __ ret(0);

  __ bind(&skip);
  // Untag the AST id and push it on the stack.
  __ SmiToInteger32(rax, rax);
  __ push(rax);

  // Generate the code for doing the frame-to-frame translation using
  // the deoptimizer infrastructure.
  Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
  generator.Generate();
}
   1487 
   1488 
   1489 #undef __
   1490 
   1491 } }  // namespace v8::internal
   1492 
   1493 #endif  // V8_TARGET_ARCH_X64
   1494