// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/arm64/frames-arm64.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)

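// The `__` shorthand expands each `__ Op(...)` below to `masm->Op(...)`:
// ACCESS_MASM(masm) is defined as `masm->` in src/macro-assembler.h, so e.g.
// `__ Mov(x0, x1)` emits a move through the MacroAssembler.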

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : target
  //  -- x3                 : new target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(x1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // Insert extra arguments.
  const int num_extra_args = 2;
  __ Push(x1, x3);

  // JumpToExternalReference expects x0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Add(x0, x0, num_extra_args + 1);

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
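// For example, a call reaching Generate_Adaptor with x0 == 2 enters the C++
// builtin with x0 == 2 + 2 + 1 == 5: the two explicit arguments, the two
// extra arguments (target and new target) pushed above, and the receiver.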


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_InternalArrayCode");
  Label generic_array_code;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }
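  // The debug checks above rely on kSmiTag being 0: Tst against kSmiTagMask
  // sets the Z flag for a smi, so Assert(ne, ...) fails if the initial-map
  // slot still holds a smi, and the CompareObjectType/Assert(eq, ...) pair
  // then verifies that the slot holds an actual Map.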

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ArrayCode");
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
  __ Mov(x3, x1);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments
  //  -- x1                 : function
  //  -- cp                 : context
  //  -- lr                 : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero-based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_MathMaxMin");

  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in x5 and the double value in d5.
  __ LoadRoot(x5, root_index);
  __ Ldr(d5, FieldMemOperand(x5, HeapNumber::kValueOffset));

  // Remember how many slots to drop (including the receiver).
  __ Add(x4, x0, 1);

  Label done_loop, loop;
  __ Bind(&loop);
  {
    // Check if all parameters have been processed.
    __ Subs(x0, x0, 1);
    __ B(lt, &done_loop);

    // Load the next parameter's tagged value into x2.
    __ Peek(x2, Operand(x0, LSL, kPointerSizeLog2));

    // Load the double value of the parameter into d2, converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert_smi, convert_number, done_convert;
    __ JumpIfSmi(x2, &convert_smi);
    __ JumpIfHeapNumber(x2, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ Push(lr, fp);
      __ Move(fp, jssp);
      __ Push(cp, x1);
      __ SmiTag(x0);
      __ SmiTag(x4);
      __ Push(x0, x5, x4);
      __ Mov(x0, x2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Mov(x2, x0);
      __ Pop(x4, x5, x0);
      {
        // Restore the double accumulator value (d5).
        Label done_restore;
        __ SmiUntagToDouble(d5, x5, kSpeculativeUntag);
        __ JumpIfSmi(x5, &done_restore);
        __ Ldr(d5, FieldMemOperand(x5, HeapNumber::kValueOffset));
        __ Bind(&done_restore);
      }
      __ SmiUntag(x4);
      __ SmiUntag(x0);
      __ Pop(x1, cp, fp, lr);
    }
    __ AssertNumber(x2);
    __ JumpIfSmi(x2, &convert_smi);

    __ Bind(&convert_number);
    __ Ldr(d2, FieldMemOperand(x2, HeapNumber::kValueOffset));
    __ B(&done_convert);

    __ Bind(&convert_smi);
    __ SmiUntagToDouble(d2, x2);
    __ Bind(&done_convert);

    // We can use a single fmin/fmax for the operation itself, but we then need
    // to work out which HeapNumber (or smi) the result came from.
    __ Fmov(x11, d5);
    if (kind == MathMaxMinKind::kMin) {
      __ Fmin(d5, d5, d2);
    } else {
      DCHECK(kind == MathMaxMinKind::kMax);
      __ Fmax(d5, d5, d2);
    }
    __ Fmov(x10, d5);
    __ Cmp(x10, x11);
    __ Csel(x5, x5, x2, eq);
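    // The Fmov transfers around the Fmin/Fmax expose the raw bit patterns of
    // the doubles: if the operation leaves the accumulator bits unchanged
    // (x10 == x11), the result came from the accumulator and the tagged value
    // in x5 is kept; otherwise the freshly converted parameter in x2 becomes
    // the new tagged result. Comparing bits rather than double values also
    // keeps -0.0 and +0.0 distinct.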
    __ B(&loop);
  }

  __ Bind(&done_loop);
  __ Drop(x4);
  __ Mov(x0, x5);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_NumberConstructor");

  // 1. Load the first argument into x0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x0, MemOperand(jssp, 2 * kPointerSize, PostIndex));
  }
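  // Note: Drop(x0) removes the x0 arguments above the first one (the last
  // argument sits at jssp[0]); the post-indexed load then reads the first
  // argument and advances jssp by two slots, discarding that argument and the
  // receiver in one step.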

  // 2a. Convert the first argument to a number.
  __ Jump(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);

  // 2b. No arguments, return +0 (already in x0).
  __ Bind(&no_arguments);
  __ Drop(1);
  __ Ret();
}


// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- x3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_NumberConstructor_ConstructStub");

  // 1. Make sure we operate in the context of the called function.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // 2. Load the first argument into x2 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x2, MemOperand(jssp, 2 * kPointerSize, PostIndex));
    __ B(&done);
    __ Bind(&no_arguments);
    __ Drop(1);
    __ Mov(x2, Smi::FromInt(0));
    __ Bind(&done);
  }

  // 3. Make sure x2 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(x2, &done_convert);
    __ JumpIfObjectType(x2, x4, x4, HEAP_NUMBER_TYPE, &done_convert, eq);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(x1, x3);
      __ Move(x0, x2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(x2, x0);
      __ Pop(x3, x1);
    }
    __ Bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Cmp(x1, x3);
  __ B(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create the new object.
  __ Bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(x2);
  }
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
  __ Ret();
}


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor");

  // 1. Load the first argument into x0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x0, MemOperand(jssp, 2 * kPointerSize, PostIndex));
  }

  // 2a. At least one argument, return x0 if it's a string, otherwise
  // dispatch to the appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(x0, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(x0, x1, x1, FIRST_NONSTRING_TYPE);
    __ B(hi, &to_string);
    __ B(eq, &symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ Bind(&no_arguments);
  {
    __ LoadRoot(x0, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ Ret();
  }

  // 3a. Convert x0 to a string.
  __ Bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in x0 to a string.
  __ Bind(&symbol_descriptive_string);
  {
    __ Push(x0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}


// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- x3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor_ConstructStub");

  // 1. Make sure we operate in the context of the called function.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // 2. Load the first argument into x2 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x2, MemOperand(jssp, 2 * kPointerSize, PostIndex));
    __ B(&done);
    __ Bind(&no_arguments);
    __ Drop(1);
    __ LoadRoot(x2, Heap::kempty_stringRootIndex);
    __ Bind(&done);
  }

  // 3. Make sure x2 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(x2, &convert);
    __ JumpIfObjectType(x2, x4, x4, FIRST_NONSTRING_TYPE, &done_convert, lo);
    __ Bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(x1, x3);
      __ Move(x0, x2);
      __ CallStub(&stub);
      __ Move(x2, x0);
      __ Pop(x3, x1);
    }
    __ Bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Cmp(x1, x3);
  __ B(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create the new object.
  __ Bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(x2);
  }
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
  __ Ret();
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}
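// Code objects are tagged heap pointers, so adding Code::kHeaderSize and
// subtracting kHeapObjectTag (as done above) turns a Code pointer into the
// address of its first instruction, which is what Br needs.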

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (preserved for callee)
  //  -- x1 : target function (preserved for callee)
  //  -- x3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target.
    // Push another copy as a parameter to the runtime call.
    __ SmiTag(x0);
    __ Push(x0, x1, x3, x1);

    __ CallRuntime(function_id, 1);
    __ Move(x2, x0);

    // Restore target function and new target.
    __ Pop(x3, x1, x0);
    __ SmiUntag(x0);
  }

  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However, not
  // checking may delay installing ready functions, and always checking would
  // be quite expensive. A good compromise is to first check against the stack
  // limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
  __ B(hs, &ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ Bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- x1     : constructor function
  //  -- x2     : allocation site or undefined
  //  -- x3     : new target
  //  -- lr     : return address
  //  -- cp     : context pointer
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the four incoming parameters on the stack.
    Register argc = x0;
    Register constructor = x1;
    Register allocation_site = x2;
    Register new_target = x3;

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(allocation_site, x10);
    __ Push(cp);
    __ SmiTag(argc);
    __ Push(allocation_site, argc);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(constructor, new_target);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Mov(x4, x0);
      __ Pop(new_target, constructor);

      // ----------- S t a t e -------------
      //  -- x1: constructor function
      //  -- x3: new target
      //  -- x4: newly allocated object
      // -----------------------------------

      // Reload the number of arguments from the stack.
      // Set it up in x0 for the function call below.
      // jssp[0]: number of arguments (smi-tagged)
      __ Peek(argc, 0);  // Load number of arguments.
    }

    __ SmiUntag(argc);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(x4, x4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);

    // Copy arguments and receiver to the expression stack.
    // Copy 2 values every loop to use ldp/stp.
    // x0: number of arguments
    // x1: constructor function
    // x2: address of last argument (caller sp)
    // x3: new target
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: number of arguments (smi-tagged)
    // Compute the start address of the copy in x4.
    __ Add(x4, x2, Operand(argc, LSL, kPointerSizeLog2));
    Label loop, entry, done_copying_arguments;
    __ B(&entry);
    __ Bind(&loop);
    __ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex));
    __ Push(x11, x10);
    __ Bind(&entry);
    __ Cmp(x4, x2);
    __ B(gt, &loop);
    // Because we copied values 2 by 2 we may have copied one extra value.
    // Drop it if that is the case.
    __ B(eq, &done_copying_arguments);
    __ Drop(1);
    __ Bind(&done_copying_arguments);
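    // For example, with argc == 3 the loop above runs twice and copies four
    // slots; the fourth value, read from below the last argument, is junk,
    // and the eq check fails so the extra slot is dropped again.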

    // Call the function.
    // x0: number of arguments
    // x1: constructor function
    // x3: new target
    ParameterCount actual(argc);
    __ InvokeFunction(constructor, new_target, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore the context from the frame.
    // x0: result
    // jssp[0]: receiver
    // jssp[1]: number of arguments (smi-tagged)
    __ Ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // x0: result
      // jssp[0]: receiver (newly allocated object)
      // jssp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(x0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ JumpIfObjectType(x0, x1, x3, FIRST_JS_RECEIVER_TYPE, &exit, ge);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ Bind(&use_receiver);
      __ Peek(x0, 0);

      // Remove the receiver from the stack, remove caller arguments, and
      // return.
      __ Bind(&exit);
      // x0: result
      // jssp[0]: receiver (newly allocated object)
      // jssp[1]: number of arguments (smi-tagged)
      __ Peek(x1, 1 * kXRegSize);
    } else {
      __ Peek(x1, 0);
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(x0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ Bind(&dont_throw);
  }

  __ DropBySMI(x1);
  __ Drop(1);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
  }
  __ Ret();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}


void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(x1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the value to pass to the generator
  //  -- x1 : the JSGeneratorObject to resume
  //  -- x2 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(x1);

  // Store input value into generator object.
  __ Str(x0, FieldMemOperand(x1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(x1, JSGeneratorObject::kInputOrDebugPosOffset, x0, x3,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ Str(x2, FieldMemOperand(x1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ Ldr(cp, FieldMemOperand(x1, JSGeneratorObject::kContextOffset));
  __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  __ Mov(x10, Operand(last_step_action));
  __ Ldrsb(x10, MemOperand(x10));
  __ CompareAndBranch(x10, Operand(StepIn), ge, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ Mov(x10, Operand(debug_suspended_generator));
  __ Ldr(x10, MemOperand(x10));
  __ CompareAndBranch(x10, Operand(x1), eq,
                      &prepare_step_in_suspended_generator);
  __ Bind(&stepping_prepared);

  // Push receiver.
  __ Ldr(x5, FieldMemOperand(x1, JSGeneratorObject::kReceiverOffset));
  __ Push(x5);

  // ----------- S t a t e -------------
  //  -- x1      : the JSGeneratorObject to resume
  //  -- x2      : the resume mode (tagged)
  //  -- x4      : generator function
  //  -- cp      : generator context
  //  -- lr      : return address
  //  -- jssp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(w10,
         FieldMemOperand(x10, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(x11, Heap::kTheHoleValueRootIndex);
  __ PushMultipleTimes(x11, w10);

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ Ldr(x3, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(x3, x3, x3, BYTECODE_ARRAY_TYPE);
  __ B(ne, &old_generator);

  // New-style (ignition/turbofan) generator object.
  {
    __ Ldr(x0, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(w0,
           FieldMemOperand(x0, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(x3, x1);
    __ Move(x1, x4);
    __ Ldr(x5, FieldMemOperand(x1, JSFunction::kCodeEntryOffset));
    __ Jump(x5);
  }

  // Old-style (full-codegen) generator object.
  __ Bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(lr, fp);
    __ Move(fp, jssp);
    __ Push(cp, x4);

    // Restore the operand stack.
    __ Ldr(x0, FieldMemOperand(x1, JSGeneratorObject::kOperandStackOffset));
    __ Ldr(w3, UntagSmiFieldMemOperand(x0, FixedArray::kLengthOffset));
    __ Add(x0, x0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ Add(x3, x0, Operand(x3, LSL, kPointerSizeLog2));
    {
      Label done_loop, loop;
      __ Bind(&loop);
      __ Cmp(x0, x3);
      __ B(eq, &done_loop);
      __ Ldr(x10, MemOperand(x0, kPointerSize, PostIndex));
      __ Push(x10);
      __ B(&loop);
      __ Bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(x10, Heap::kEmptyFixedArrayRootIndex);
    __ Str(x10, FieldMemOperand(x1, JSGeneratorObject::kOperandStackOffset));

    // Resume the generator function at the continuation.
    __ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(x10, FieldMemOperand(x10, SharedFunctionInfo::kCodeOffset));
    __ Add(x10, x10, Code::kHeaderSize - kHeapObjectTag);
    __ Ldrsw(x11,
             UntagSmiFieldMemOperand(x1, JSGeneratorObject::kContinuationOffset));
    __ Add(x10, x10, x11);
    __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
    __ Str(x12, FieldMemOperand(x1, JSGeneratorObject::kContinuationOffset));
    __ Move(x0, x1);  // Continuation expects generator object in x0.
    __ Br(x10);
  }

  __ Bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x1, x2, x4);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(x2, x1);
    __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
  }
  __ B(&stepping_prepared);

  __ Bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x1, x2);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(x2, x1);
    __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
  }
  __ B(&stepping_prepared);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers x10, x15; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  Label enough_stack_space;
  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
  // Make x10 the space we have left. The stack might already be overflowed
  // here which will cause x10 to become negative.
  // TODO(jbramley): Check that the stack usage here is safe.
  __ Sub(x10, jssp, x10);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ B(gt, &enough_stack_space);
  __ CallRuntime(Runtime::kThrowStackOverflow);
  // We should never return from the kThrowStackOverflow runtime call.
  if (__ emit_debug_code()) {
    __ Unreachable();
  }

  __ Bind(&enough_stack_space);
}
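// With kArgcIsSmiTagged the argc register still holds a smi, so
// Operand::UntagSmiAndScale untags it and scales it to a byte count in a
// single operand; the untagged path simply shifts argc by kPointerSizeLog2.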


// Input:
//   x0: new.target.
//   x1: function.
//   x2: receiver.
//   x3: argc.
//   x4: argv.
// Output:
//   x0: result.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody().
  Register new_target = x0;
  Register function = x1;
  Register receiver = x2;
  Register argc = x3;
  Register argv = x4;
  Register scratch = x10;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  {
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    __ Mov(scratch, Operand(ExternalReference(Isolate::kContextAddress,
                                              masm->isolate())));
    __ Ldr(cp, MemOperand(scratch));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(function, receiver);

    // Check if we have enough stack space to push all arguments.
    // Expects the argument count in argc (x3); clobbers x10 and x15.
    Generate_CheckStackOverflow(masm, argc, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop, in reverse order.
    // x3: argc.
    // x4: argv.
    Label loop, entry;
    // Compute the copy end address.
    __ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2));

    __ B(&entry);
    __ Bind(&loop);
    __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
    __ Ldr(x12, MemOperand(x11));  // Dereference the handle.
    __ Push(x12);  // Push the argument.
    __ Bind(&entry);
    __ Cmp(scratch, argv);
    __ B(ne, &loop);

    __ Mov(scratch, argc);
    __ Mov(argc, new_target);
    __ Mov(new_target, scratch);
    // x0: argc.
    // x3: new.target.

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    // The original values have been saved in JSEntryStub::GenerateBody().
    __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
    __ Mov(x20, x19);
    __ Mov(x21, x19);
    __ Mov(x22, x19);
    __ Mov(x23, x19);
    __ Mov(x24, x19);
    __ Mov(x25, x19);
    // Don't initialize the reserved registers.
    // x26 : root register (root).
    // x27 : context pointer (cp).
    // x28 : JS stack pointer (jssp).
    // x29 : frame pointer (fp).

    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS internal frame and remove the parameters (except function),
    // and return.
  }

  // Result is in x0. Return.
  __ Ret();
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ Ldr(args_count,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ Ldr(args_count.W(),
         FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments.
  __ Drop(args_count, 1);
}
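// Note: BytecodeArray::kParameterSizeOffset appears to hold the size of the
// parameter area (receiver included) in bytes, which is why the Drop above
// uses a unit size of 1 rather than kPointerSize.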

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   - x1: the JS function object being called.
//   - x3: the new target.
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ Push(lr, fp, cp, x1);
  __ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ Ldr(x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  Register debug_info = kInterpreterBytecodeArrayRegister;
  Label load_debug_bytecode_array, bytecode_array_loaded;
  DCHECK(!debug_info.is(x0));
  __ Ldr(debug_info, FieldMemOperand(x0, SharedFunctionInfo::kDebugInfoOffset));
  __ Cmp(debug_info, Operand(DebugInfo::uninitialized()));
  __ B(ne, &load_debug_bytecode_array);
  __ Ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset));
  __ Bind(&bytecode_array_loaded);

  // Check that the function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ CompareRoot(kInterpreterBytecodeArrayRegister,
                 Heap::kUndefinedValueRootIndex);
  __ B(eq, &bytecode_array_not_present);
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
                    kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, x0, x0,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load the initial bytecode offset.
  __ Mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, the bytecode array and the smi-tagged bytecode offset.
  __ SmiTag(x0, kInterpreterBytecodeOffsetRegister);
  __ Push(x3, kInterpreterBytecodeArrayRegister, x0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load the frame size from the BytecodeArray object.
    __ Ldr(w11, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    DCHECK(jssp.Is(__ StackPointer()));
    __ Sub(x10, jssp, Operand(x11));
    __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
    __ B(hs, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ Bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries. Note: there should always be at least one stack slot for the
    // return register in the register file.
    Label loop_header;
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
    // TODO(rmcilroy): Ensure we always have an even number of registers to
    // allow the stack to be 16-byte aligned (and remove the need for jssp).
    __ Lsr(x11, x11, kPointerSizeLog2);
    __ PushMultipleTimes(x10, x11);
    __ Bind(&loop_header);
  }
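  // The frame size loaded above is a byte count; the Lsr by kPointerSizeLog2
  // converts it into the number of slots for PushMultipleTimes.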

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ Mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
  __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
  __ Call(ip0);
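  // Each bytecode (one byte, 0..255) indexes a table of code entry addresses:
  // Ldrb fetches the current bytecode, the shift scales it to a table offset,
  // and the indirect call enters the matching handler.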
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in x0.
  LeaveInterpreterFrame(masm, x2);
  __ Ret();

  // Load debug copy of the bytecode array.
  __ Bind(&load_debug_bytecode_array);
  __ Ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ B(&bytecode_array_loaded);

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ Bind(&bytecode_array_not_present);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ Ldr(x7, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x7, FieldMemOperand(x7, SharedFunctionInfo::kCodeOffset));
  __ Add(x7, x7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Str(x7, FieldMemOperand(x1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(x1, x7, x5);
  __ Jump(x7);
}

void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
  // Save the function and context for the call to CompileBaseline.
  __ Ldr(x1, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ Ldr(kContextRegister,
         MemOperand(fp, StandardFrameConstants::kContextOffset));

  // Leave the frame before recompiling for baseline so that we don't count as
  // an activation on the stack.
  LeaveInterpreterFrame(masm, x2);

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Push return value.
    __ Push(x0);

    // Push function as argument and compile for baseline.
    __ Push(x1);
    __ CallRuntime(Runtime::kCompileBaseline);

    // Restore return value.
    __ Pop(x0);
  }
  __ Ret();
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- x1 : the target to call (can be any Object).
  // -----------------------------------

  // Find the address of the last argument.
  __ Add(x3, x0, Operand(1));  // Add one for receiver.
  __ Lsl(x3, x3, kPointerSizeLog2);
  __ Sub(x4, x2, x3);

  // Push the arguments.
  Label loop_header, loop_check;
  __ Mov(x5, jssp);
  __ Claim(x3, 1);
  __ B(&loop_check);
  __ Bind(&loop_header);
  // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
  __ Ldr(x3, MemOperand(x2, -kPointerSize, PostIndex));
  __ Str(x3, MemOperand(x5, -kPointerSize, PreIndex));
  __ Bind(&loop_check);
  __ Cmp(x2, x4);
  __ B(gt, &loop_header);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- x0 : argument count (not including receiver)
  // -- x3 : new target
  // -- x1 : constructor to call
  // -- x2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  __ Add(x5, x0, Operand(1));  // Add one for receiver (to be constructed).
  __ Lsl(x5, x5, kPointerSizeLog2);

  // Set stack pointer and where to stop.
  __ Mov(x6, jssp);
  __ Claim(x5, 1);
  __ Sub(x4, x6, x5);

  // Push a slot for the receiver.
  __ Str(xzr, MemOperand(x6, -kPointerSize, PreIndex));

  Label loop_header, loop_check;
  // Push the arguments.
  __ B(&loop_check);
  __ Bind(&loop_header);
  // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
  __ Ldr(x5, MemOperand(x2, -kPointerSize, PostIndex));
  __ Str(x5, MemOperand(x6, -kPointerSize, PreIndex));
  __ Bind(&loop_check);
  __ Cmp(x6, x4);
  __ B(gt, &loop_header);

  // Call the constructor with x0, x1, and x3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ LoadObject(x1, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ Add(lr, x1, Operand(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));
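  // lr now points into InterpreterEntryTrampoline at the instruction
  // following its dispatch call, so returning from the bytecode handler
  // resumes the normal dispatch loop.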

  // Initialize the dispatch table register.
  __ Mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ Ldr(kInterpreterBytecodeArrayRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check that the function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
                    kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, x1, x1,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ Ldr(kInterpreterBytecodeOffsetRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
  __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
  __ Jump(ip0);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (preserved for callee)
  //  -- x3 : new target (preserved for callee)
  //  -- x1 : target function (preserved for callee)
  // -----------------------------------
  // First look up code; maybe we don't need to compile!
  Label gotta_call_runtime;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = x1;
  Register map = x13;
  Register index = x2;
  __ Ldr(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(map,
         FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ Ldrsw(index, UntagSmiFieldMemOperand(map, FixedArray::kLengthOffset));
  __ Cmp(index, Operand(2));
  __ B(lt, &gotta_call_runtime);

  // Find literals.
  // x4  : native context
  // x2  : length / index
  // x13 : optimized code map
  // stack[0] : new target
  // stack[8] : closure
  Register native_context = x4;
  __ Ldr(native_context, NativeContextMemOperand());

  __ Bind(&loop_top);
  Register temp = x5;
  Register array_pointer = x6;

  // Does the native context match?
  __ Add(array_pointer, map, Operand(index, LSL, kPointerSizeLog2));
  __ Ldr(temp, FieldMemOperand(array_pointer,
                               SharedFunctionInfo::kOffsetToPreviousContext));
  __ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ Cmp(temp, native_context);
  __ B(ne, &loop_bottom);
  // OSR id set to none?
  __ Ldr(temp, FieldMemOperand(array_pointer,
                               SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ Cmp(temp, Operand(Smi::FromInt(bailout_id)));
  __ B(ne, &loop_bottom);

  // Literals available?
  Label got_literals, maybe_cleared_weakcell;
  Register temp2 = x7;
  __ Ldr(temp, FieldMemOperand(array_pointer,
                               SharedFunctionInfo::kOffsetToPreviousLiterals));
  // temp contains either a WeakCell pointing to the literals array or the
  // literals array directly.
  STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
  __ Ldr(temp2, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(temp2, &maybe_cleared_weakcell);
  // temp2 is a pointer, therefore temp is a WeakCell pointing to a literals
  // array.
  __ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ B(&got_literals);

  // temp2 is a smi. If it's 0, then we are looking at a cleared WeakCell
  // around the literals array, and we should visit the runtime. If it's > 0,
  // then temp already contains the literals array.
   1316   __ bind(&maybe_cleared_weakcell);
   1317   __ Cmp(temp2, Operand(Smi::FromInt(0)));
   1318   __ B(eq, &gotta_call_runtime);
   1319 
   1320   // Save the literals in the closure.
   1321   __ bind(&got_literals);
   1322   __ Str(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset));
   1323   __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, x7,
   1324                       kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
   1325                       OMIT_SMI_CHECK);
   1326 
   1327   // Code available?
   1328   Register entry = x7;
   1329   __ Ldr(entry,
   1330          FieldMemOperand(array_pointer,
   1331                          SharedFunctionInfo::kOffsetToPreviousCachedCode));
   1332   __ Ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
   1333   __ JumpIfSmi(entry, &maybe_call_runtime);
   1334 
   1335   // Found literals and code. Get them into the closure and return.
   1336   __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   1337 
   1338   Label install_optimized_code_and_tailcall;
   1339   __ Bind(&install_optimized_code_and_tailcall);
   1340   __ Str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
   1341   __ RecordWriteCodeEntryField(closure, entry, x5);
   1342 
   1343   // Link the closure into the optimized function list.
   1344   // x7 : code entry
   1345   // x4 : native context
   1346   // x1 : closure
   1347   __ Ldr(x8,
   1348          ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
   1349   __ Str(x8, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
   1350   __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, x8, x13,
   1351                       kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
   1352                       OMIT_SMI_CHECK);
   1353   const int function_list_offset =
   1354       Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
   1355   __ Str(closure,
   1356          ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
   1357   __ Mov(x5, closure);
   1358   __ RecordWriteContextSlot(native_context, function_list_offset, x5, x13,
   1359                             kLRHasNotBeenSaved, kDontSaveFPRegs);
   1360   __ Jump(entry);
   1361 
   1362   __ Bind(&loop_bottom);
   1363   __ Sub(index, index, Operand(SharedFunctionInfo::kEntryLength));
   1364   __ Cmp(index, Operand(1));
   1365   __ B(gt, &loop_top);
   1366 
   1367   // We found neither literals nor code.
   1368   __ B(&gotta_call_runtime);
   1369 
   1370   __ Bind(&maybe_call_runtime);
   1371 
   1372   // Last possibility. Check the context-free optimized code map entry.
   1373   __ Ldr(entry, FieldMemOperand(map, FixedArray::kHeaderSize +
   1374                                          SharedFunctionInfo::kSharedCodeIndex));
   1375   __ Ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
   1376   __ JumpIfSmi(entry, &try_shared);
   1377 
   1378   // Store code entry in the closure.
   1379   __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   1380   __ B(&install_optimized_code_and_tailcall);
   1381 
   1382   __ Bind(&try_shared);
   1383   // Is the full code valid, i.e. not the lazy-compile builtin stub?
   1384   __ Ldr(entry,
   1385          FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
   1386   __ Ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
   1387   __ Ldr(x5, FieldMemOperand(entry, Code::kFlagsOffset));
   1388   __ and_(x5, x5, Operand(Code::KindField::kMask));
   1389   __ Mov(x5, Operand(x5, LSR, Code::KindField::kShift));
   1390   __ Cmp(x5, Operand(Code::BUILTIN));
   1391   __ B(eq, &gotta_call_runtime);
   1392   // Yes, install the full code.
   1393   __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   1394   __ Str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
   1395   __ RecordWriteCodeEntryField(closure, entry, x5);
   1396   __ Jump(entry);
   1397 
   1398   __ Bind(&gotta_call_runtime);
   1399   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
   1400 }
   1401 
   1402 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
   1403   GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
   1404 }
   1405 
   1406 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
   1407   GenerateTailCallToReturnedCode(masm,
   1408                                  Runtime::kCompileOptimized_NotConcurrent);
   1409 }
   1410 
   1411 
   1412 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
   1413   GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
   1414 }
   1415 
   1416 
   1417 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
   1418   // For now, we are relying on the fact that make_code_young doesn't do any
   1419   // garbage collection which allows us to save/restore the registers without
   1420   // worrying about which of them contain pointers. We also don't build an
   1421   // internal frame to make the code fast, since we shouldn't have to do stack
   1422   // crawls in MakeCodeYoung. This seems a bit fragile.
   1423 
   1424   // The following caller-saved registers must be saved and restored when
   1425   // calling through to the runtime:
   1426   //   x0 - The address from which to resume execution.
   1427   //   x1 - isolate
   1428   //   x3 - new target
   1429   //   lr - The return address for the JSFunction itself. It has not yet been
   1430   //        preserved on the stack because the frame setup code was replaced
   1431   //        with a call to this stub, to handle code ageing.
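          // Note that x0 and x1 double as the two C arguments passed below:
          // the address and the isolate.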
   1432   {
   1433     FrameScope scope(masm, StackFrame::MANUAL);
   1434     __ Push(x0, x1, x3, fp, lr);
   1435     __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
   1436     __ CallCFunction(
   1437         ExternalReference::get_make_code_young_function(masm->isolate()), 2);
   1438     __ Pop(lr, fp, x3, x1, x0);
   1439   }
   1440 
   1441   // The calling function has been made young again, so return to execute the
   1442   // real frame set-up code.
   1443   __ Br(x0);
   1444 }
   1445 
   1446 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
   1447 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
   1448     MacroAssembler* masm) {                                  \
   1449   GenerateMakeCodeYoungAgainCommon(masm);                    \
   1450 }                                                            \
   1451 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
   1452     MacroAssembler* masm) {                                  \
   1453   GenerateMakeCodeYoungAgainCommon(masm);                    \
   1454 }
   1455 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
   1456 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
   1457 
   1458 
   1459 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
   1460   // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
   1461   // that make_code_young doesn't do any garbage collection which allows us to
   1462   // save/restore the registers without worrying about which of them contain
   1463   // pointers.
   1464 
   1465   // The following caller-saved registers must be saved and restored when
   1466   // calling through to the runtime:
   1467   //   x0 - The address from which to resume execution.
   1468   //   x1 - isolate
   1469   //   x3 - new target
   1470   //   lr - The return address for the JSFunction itself. It has not yet been
   1471   //        preserved on the stack because the frame setup code was replaced
   1472   //        with a call to this stub, to handle code ageing.
   1473   {
   1474     FrameScope scope(masm, StackFrame::MANUAL);
   1475     __ Push(x0, x1, x3, fp, lr);
   1476     __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
   1477     __ CallCFunction(
   1478         ExternalReference::get_mark_code_as_executed_function(
   1479             masm->isolate()), 2);
   1480     __ Pop(lr, fp, x3, x1, x0);
   1481 
   1482     // Perform prologue operations usually performed by the young code stub.
   1483     __ EmitFrameSetupForCodeAgePatching(masm);
   1484   }
   1485 
   1486   // Jump to point after the code-age stub.
   1487   __ Add(x0, x0, kNoCodeAgeSequenceLength);
   1488   __ Br(x0);
   1489 }
   1490 
   1491 
   1492 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
   1493   GenerateMakeCodeYoungAgainCommon(masm);
   1494 }
   1495 
   1496 
   1497 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
   1498   Generate_MarkCodeAsExecutedOnce(masm);
   1499 }
   1500 
   1501 
   1502 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
   1503                                              SaveFPRegsMode save_doubles) {
   1504   {
   1505     FrameScope scope(masm, StackFrame::INTERNAL);
   1506 
   1507     // Preserve registers across the notification; this is important for
   1508     // compiled stubs that tail-call the runtime on deopt, passing their
   1509     // parameters in registers.
   1510     // TODO(jbramley): Is it correct (and appropriate) to use safepoint
   1511     // registers here? According to the comment above, we should only need to
   1512     // preserve the registers with parameters.
   1513     __ PushXRegList(kSafepointSavedRegisters);
   1514     // Pass the function and deoptimization type to the runtime system.
   1515     __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
   1516     __ PopXRegList(kSafepointSavedRegisters);
   1517   }
   1518 
   1519   // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
   1520   __ Drop(1);
   1521 
   1522   // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
   1523   // into lr before it jumps here.
   1524   __ Br(lr);
   1525 }
   1526 
   1527 
   1528 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
   1529   Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
   1530 }
   1531 
   1532 
   1533 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
   1534   Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
   1535 }
   1536 
   1537 
   1538 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
   1539                                              Deoptimizer::BailoutType type) {
   1540   {
   1541     FrameScope scope(masm, StackFrame::INTERNAL);
   1542     // Pass the deoptimization type to the runtime system.
   1543     __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
   1544     __ Push(x0);
   1545     __ CallRuntime(Runtime::kNotifyDeoptimized);
   1546   }
   1547 
   1548   // Get the full codegen state from the stack and untag it.
   1549   Register state = x6;
   1550   __ Peek(state, 0);
   1551   __ SmiUntag(state);
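          // The deoptimizer left the Smi-tagged bailout state on top of the
          // stack; for TOS_REGISTER bailouts, the accumulator value sits in
          // the slot beneath it.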
   1552 
   1553   // Switch on the state.
   1554   Label with_tos_register, unknown_state;
   1555   __ CompareAndBranch(state,
   1556                       static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS),
   1557                       ne, &with_tos_register);
   1558   __ Drop(1);  // Remove state.
   1559   __ Ret();
   1560 
   1561   __ Bind(&with_tos_register);
   1562   // Reload TOS register.
   1563   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), x0.code());
   1564   __ Peek(x0, kPointerSize);
   1565   __ CompareAndBranch(state,
   1566                       static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER),
   1567                       ne, &unknown_state);
   1568   __ Drop(2);  // Remove state and TOS.
   1569   __ Ret();
   1570 
   1571   __ Bind(&unknown_state);
   1572   __ Abort(kInvalidFullCodegenState);
   1573 }
   1574 
   1575 
   1576 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
   1577   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
   1578 }
   1579 
   1580 
   1581 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
   1582   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
   1583 }
   1584 
   1585 
   1586 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
   1587   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
   1588 }
   1589 
   1590 
   1591 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
   1592                                     Register function_template_info,
   1593                                     Register scratch0, Register scratch1,
   1594                                     Register scratch2,
   1595                                     Label* receiver_check_failed) {
   1596   Register signature = scratch0;
   1597   Register map = scratch1;
   1598   Register constructor = scratch2;
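          // Walk up the receiver's chain of hidden prototypes; for each map,
          // follow the constructor's FunctionTemplateInfo parent chain and
          // compare each template against the required signature.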
   1599 
   1600   // If there is no signature, return the holder.
   1601   __ Ldr(signature, FieldMemOperand(function_template_info,
   1602                                     FunctionTemplateInfo::kSignatureOffset));
   1603   __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
   1604   Label receiver_check_passed;
   1605   __ B(eq, &receiver_check_passed);
   1606 
   1607   // Walk the prototype chain.
   1608   __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1609   Label prototype_loop_start;
   1610   __ Bind(&prototype_loop_start);
   1611 
   1612   // Get the constructor, if any
   1613   __ GetMapConstructor(constructor, map, x16, x16);
   1614   __ cmp(x16, Operand(JS_FUNCTION_TYPE));
   1615   Label next_prototype;
   1616   __ B(ne, &next_prototype);
   1617   Register type = constructor;
   1618   __ Ldr(type,
   1619          FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
   1620   __ Ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
   1621 
   1622   // Loop through the chain of inheriting function templates.
   1623   Label function_template_loop;
   1624   __ Bind(&function_template_loop);
   1625 
   1626   // If the signatures match, we have a compatible receiver.
   1627   __ Cmp(signature, type);
   1628   __ B(eq, &receiver_check_passed);
   1629 
   1630   // If the current type is not a FunctionTemplateInfo, load the next prototype
   1631   // in the chain.
   1632   __ JumpIfSmi(type, &next_prototype);
   1633   __ CompareObjectType(type, x16, x17, FUNCTION_TEMPLATE_INFO_TYPE);
   1634   __ B(ne, &next_prototype);
   1635 
   1636   // Otherwise load the parent function template and iterate.
   1637   __ Ldr(type,
   1638          FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
   1639   __ B(&function_template_loop);
   1640 
   1641   // Load the next prototype.
   1642   __ Bind(&next_prototype);
   1643   __ Ldr(x16, FieldMemOperand(map, Map::kBitField3Offset));
   1644   __ Tst(x16, Operand(Map::HasHiddenPrototype::kMask));
   1645   __ B(eq, receiver_check_failed);
   1646   __ Ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
   1647   __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1648   // Iterate.
   1649   __ B(&prototype_loop_start);
   1650 
   1651   __ Bind(&receiver_check_passed);
   1652 }
   1653 
   1654 
   1655 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
   1656   // ----------- S t a t e -------------
   1657   //  -- x0                 : number of arguments excluding receiver
   1658   //  -- x1                 : callee
   1659   //  -- lr                 : return address
   1660   //  -- sp[0]              : last argument
   1661   //  -- ...
   1662   //  -- sp[8 * (argc - 1)] : first argument
   1663   //  -- sp[8 * argc]       : receiver
   1664   // -----------------------------------
   1665 
   1666   // Load the FunctionTemplateInfo.
   1667   __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
   1668   __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
   1669 
   1670   // Do the compatible receiver check.
   1671   Label receiver_check_failed;
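          // The receiver lives above the arguments, at jssp[argc * kPointerSize].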
   1672   __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
   1673   CompatibleReceiverCheck(masm, x2, x3, x4, x5, x6, &receiver_check_failed);
   1674 
   1675   // Get the callback offset from the FunctionTemplateInfo, and jump to the
   1676   // beginning of the code.
   1677   __ Ldr(x4, FieldMemOperand(x3, FunctionTemplateInfo::kCallCodeOffset));
   1678   __ Ldr(x4, FieldMemOperand(x4, CallHandlerInfo::kFastHandlerOffset));
   1679   __ Add(x4, x4, Operand(Code::kHeaderSize - kHeapObjectTag));
   1680   __ Jump(x4);
   1681 
   1682   // Compatible receiver check failed: throw an Illegal Invocation exception.
   1683   __ Bind(&receiver_check_failed);
   1684   // Drop the arguments (including the receiver)
   1685   __ add(x0, x0, Operand(1));
   1686   __ Drop(x0);
   1687   __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
   1688 }
   1689 
   1690 
   1691 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   1692   // Lookup the function in the JavaScript frame.
   1693   __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   1694   {
   1695     FrameScope scope(masm, StackFrame::INTERNAL);
   1696     // Pass function as argument.
   1697     __ Push(x0);
   1698     __ CallRuntime(Runtime::kCompileForOnStackReplacement);
   1699   }
   1700 
   1701   // If the runtime returned no code (Smi zero), return to unoptimized code.
   1702   Label skip;
   1703   __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
   1704   __ Ret();
   1705 
   1706   __ Bind(&skip);
   1707 
   1708   // Load deoptimization data from the code object.
   1709   // <deopt_data> = <code>[#deoptimization_data_offset]
   1710   __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
   1711 
   1712   // Load the OSR entrypoint offset from the deoptimization data.
   1713   // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
   1714   __ Ldrsw(w1, UntagSmiFieldMemOperand(x1, FixedArray::OffsetOfElementAt(
   1715       DeoptimizationInputData::kOsrPcOffsetIndex)));
   1716 
   1717   // Compute the target address = code_obj + header_size + osr_offset
   1718   // <entry_addr> = <code_obj> + #header_size + <osr_offset>
   1719   __ Add(x0, x0, x1);
   1720   __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);
   1721 
   1722   // And "return" to the OSR entry point of the function.
   1723   __ Ret();
   1724 }
   1725 
   1726 
   1727 // static
   1728 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
   1729                                                int field_index) {
   1730   // ----------- S t a t e -------------
   1731   //  -- x0      : number of arguments
   1732   //  -- x1      : function
   1733   //  -- cp      : context
   1734   //  -- lr      : return address
   1735   //  -- jssp[0] : receiver
   1736   // -----------------------------------
   1737   ASM_LOCATION("Builtins::Generate_DatePrototype_GetField");
   1738 
   1739   // 1. Pop receiver into x0 and check that it's actually a JSDate object.
   1740   Label receiver_not_date;
   1741   {
   1742     __ Pop(x0);
   1743     __ JumpIfSmi(x0, &receiver_not_date);
   1744     __ JumpIfNotObjectType(x0, x2, x3, JS_DATE_TYPE, &receiver_not_date);
   1745   }
   1746 
   1747   // 2. Load the specified date field, falling back to the runtime as necessary.
   1748   if (field_index == JSDate::kDateValue) {
   1749     __ Ldr(x0, FieldMemOperand(x0, JSDate::kValueOffset));
   1750   } else {
   1751     if (field_index < JSDate::kFirstUncachedField) {
   1752       Label stamp_mismatch;
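              // Cached date fields are only valid while the isolate-wide date
              // cache stamp matches the stamp stored in the JSDate; on a
              // mismatch, fall through to the C++ helper to recompute them.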
   1753       __ Mov(x1, ExternalReference::date_cache_stamp(masm->isolate()));
   1754       __ Ldr(x1, MemOperand(x1));
   1755       __ Ldr(x2, FieldMemOperand(x0, JSDate::kCacheStampOffset));
   1756       __ Cmp(x1, x2);
   1757       __ B(ne, &stamp_mismatch);
   1758       __ Ldr(x0, FieldMemOperand(
   1759                      x0, JSDate::kValueOffset + field_index * kPointerSize));
   1760       __ Ret();
   1761       __ Bind(&stamp_mismatch);
   1762     }
   1763     FrameScope scope(masm, StackFrame::INTERNAL);
   1764     __ Mov(x1, Smi::FromInt(field_index));
   1765     __ CallCFunction(
   1766         ExternalReference::get_date_field_function(masm->isolate()), 2);
   1767   }
   1768   __ Ret();
   1769 
   1770   // 3. Raise a TypeError if the receiver is not a date.
   1771   __ Bind(&receiver_not_date);
   1772   {
   1773     FrameScope scope(masm, StackFrame::MANUAL);
   1774     __ Push(x0, lr, fp);
   1775     __ Move(fp, jssp);
   1776     __ Push(cp, x1);
   1777     __ Push(Smi::FromInt(0));
   1778     __ CallRuntime(Runtime::kThrowNotDateError);
   1779   }
   1780 }
   1781 
   1782 // static
   1783 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   1784   // ----------- S t a t e -------------
   1785   //  -- x0       : argc
   1786   //  -- jssp[0]  : argArray (if argc == 2)
   1787   //  -- jssp[8]  : thisArg  (if argc >= 1)
   1788   //  -- jssp[16] : receiver
   1789   // -----------------------------------
   1790   ASM_LOCATION("Builtins::Generate_FunctionPrototypeApply");
   1791 
   1792   Register argc = x0;
   1793   Register arg_array = x0;
   1794   Register receiver = x1;
   1795   Register this_arg = x2;
   1796   Register undefined_value = x3;
   1797   Register null_value = x4;
   1798 
   1799   __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
   1800   __ LoadRoot(null_value, Heap::kNullValueRootIndex);
   1801 
   1802   // 1. Load receiver into x1, argArray into x0 (if present), remove all
   1803   // arguments from the stack (including the receiver), and push thisArg (if
   1804   // present) instead.
   1805   {
   1806     // Claim (2 - argc) dummy arguments from the stack, to put the stack in a
   1807     // consistent state for a simple pop operation.
   1808     __ Claim(2);
   1809     __ Drop(argc);
   1810 
   1811     // ----------- S t a t e -------------
   1812     //  -- x0       : argc
   1813     //  -- jssp[0]  : argArray (dummy value if argc <= 1)
   1814     //  -- jssp[8]  : thisArg  (dummy value if argc == 0)
   1815     //  -- jssp[16] : receiver
   1816     // -----------------------------------
   1817     __ Cmp(argc, 1);
   1818     __ Pop(arg_array, this_arg);               // Overwrites argc.
   1819     __ CmovX(this_arg, undefined_value, lo);   // undefined if argc == 0.
   1820     __ CmovX(arg_array, undefined_value, ls);  // undefined if argc <= 1.
   1821 
   1822     __ Peek(receiver, 0);
   1823     __ Poke(this_arg, 0);
   1824   }
   1825 
   1826   // ----------- S t a t e -------------
   1827   //  -- x0      : argArray
   1828   //  -- x1      : receiver
   1829   //  -- x3      : undefined root value
   1830   //  -- jssp[0] : thisArg
   1831   // -----------------------------------
   1832 
   1833   // 2. Make sure the receiver is actually callable.
   1834   Label receiver_not_callable;
   1835   __ JumpIfSmi(receiver, &receiver_not_callable);
   1836   __ Ldr(x10, FieldMemOperand(receiver, HeapObject::kMapOffset));
   1837   __ Ldrb(w10, FieldMemOperand(x10, Map::kBitFieldOffset));
   1838   __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable,
   1839                              &receiver_not_callable);
   1840 
   1841   // 3. Tail call with no arguments if argArray is null or undefined.
   1842   Label no_arguments;
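          // If the first compare finds null, the Ccmp forces the Z flag;
          // otherwise arg_array is compared against undefined. The branch is
          // thus taken exactly when argArray is null or undefined.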
   1843   __ Cmp(arg_array, null_value);
   1844   __ Ccmp(arg_array, undefined_value, ZFlag, ne);
   1845   __ B(eq, &no_arguments);
   1846 
   1847   // 4a. Apply the receiver to the given argArray (passing undefined for
   1848   // new.target in x3).
   1849   DCHECK(undefined_value.Is(x3));
   1850   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   1851 
   1852   // 4b. The argArray is either null or undefined, so we tail call without any
   1853   // arguments to the receiver.
   1854   __ Bind(&no_arguments);
   1855   {
   1856     __ Mov(x0, 0);
   1857     DCHECK(receiver.Is(x1));
   1858     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1859   }
   1860 
   1861   // 4c. The receiver is not callable, throw an appropriate TypeError.
   1862   __ Bind(&receiver_not_callable);
   1863   {
   1864     __ Poke(receiver, 0);
   1865     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
   1866   }
   1867 }
   1868 
   1869 
   1870 // static
   1871 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
   1872   Register argc = x0;
   1873   Register function = x1;
   1874   Register scratch1 = x10;
   1875   Register scratch2 = x11;
   1876 
   1877   ASM_LOCATION("Builtins::Generate_FunctionPrototypeCall");
   1878 
   1879   // 1. Make sure we have at least one argument.
   1880   {
   1881     Label done;
   1882     __ Cbnz(argc, &done);
   1883     __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
   1884     __ Push(scratch1);
   1885     __ Mov(argc, 1);
   1886     __ Bind(&done);
   1887   }
   1888 
   1889   // 2. Get the callable to call (passed as receiver) from the stack.
   1890   __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));
   1891 
   1892   // 3. Shift the arguments one slot towards the receiver, overwriting the
   1893   //    original receiver.  Adjust the argument count to make the original
   1894   //    first argument the new receiver.
   1895   {
   1896     Label loop;
   1897     // Calculate the copy start address (destination). Copy end address is jssp.
   1898     __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2));
   1899     __ Sub(scratch1, scratch2, kPointerSize);
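            // scratch2 starts at the receiver's slot and scratch1 one slot
            // below it; both walk down towards jssp, copying each argument
            // into the slot above it. The stale duplicate of the last
            // argument left at the top of the stack is dropped afterwards.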
   1900 
   1901     __ Bind(&loop);
   1902     __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex));
   1903     __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex));
   1904     __ Cmp(scratch1, jssp);
   1905     __ B(ge, &loop);
   1906     // Adjust the actual number of arguments and remove the top element
   1907     // (which is a copy of the last argument).
   1908     __ Sub(argc, argc, 1);
   1909     __ Drop(1);
   1910   }
   1911 
   1912   // 4. Call the callable.
   1913   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
   1914 }
   1915 
   1916 
   1917 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   1918   // ----------- S t a t e -------------
   1919   //  -- x0       : argc
   1920   //  -- jssp[0]  : argumentsList (if argc == 3)
   1921   //  -- jssp[8]  : thisArgument  (if argc >= 2)
   1922   //  -- jssp[16] : target        (if argc >= 1)
   1923   //  -- jssp[24] : receiver
   1924   // -----------------------------------
   1925   ASM_LOCATION("Builtins::Generate_ReflectApply");
   1926 
   1927   Register argc = x0;
   1928   Register arguments_list = x0;
   1929   Register target = x1;
   1930   Register this_argument = x2;
   1931   Register undefined_value = x3;
   1932 
   1933   __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
   1934 
   1935   // 1. Load target into x1 (if present), argumentsList into x0 (if present),
   1936   // remove all arguments from the stack (including the receiver), and push
   1937   // thisArgument (if present) instead.
   1938   {
   1939     // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
   1940     // consistent state for a simple pop operation.
   1941     __ Claim(3);
   1942     __ Drop(argc);
   1943 
   1944     // ----------- S t a t e -------------
   1945     //  -- x0       : argc
   1946     //  -- jssp[0]  : argumentsList (dummy value if argc <= 2)
   1947     //  -- jssp[8]  : thisArgument  (dummy value if argc <= 1)
   1948     //  -- jssp[16] : target        (dummy value if argc == 0)
   1949     //  -- jssp[24] : receiver
   1950     // -----------------------------------
   1951     __ Adds(x10, argc, 0);  // Preserve argc, and set the Z flag if it is zero.
   1952     __ Pop(arguments_list, this_argument, target);  // Overwrites argc.
   1953     __ CmovX(target, undefined_value, eq);          // undefined if argc == 0.
   1954     __ Cmp(x10, 2);
   1955     __ CmovX(this_argument, undefined_value, lo);   // undefined if argc <= 1.
   1956     __ CmovX(arguments_list, undefined_value, ls);  // undefined if argc <= 2.
   1957 
   1958     __ Poke(this_argument, 0);  // Overwrite receiver.
   1959   }
   1960 
   1961   // ----------- S t a t e -------------
   1962   //  -- x0      : argumentsList
   1963   //  -- x1      : target
   1964   //  -- jssp[0] : thisArgument
   1965   // -----------------------------------
   1966 
   1967   // 2. Make sure the target is actually callable.
   1968   Label target_not_callable;
   1969   __ JumpIfSmi(target, &target_not_callable);
   1970   __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
   1971   __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
   1972   __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable, &target_not_callable);
   1973 
   1974   // 3a. Apply the target to the given argumentsList (passing undefined for
   1975   // new.target in x3).
   1976   DCHECK(undefined_value.Is(x3));
   1977   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   1978 
   1979   // 3b. The target is not callable, throw an appropriate TypeError.
   1980   __ Bind(&target_not_callable);
   1981   {
   1982     __ Poke(target, 0);
   1983     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
   1984   }
   1985 }
   1986 
   1987 
   1988 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   1989   // ----------- S t a t e -------------
   1990   //  -- x0       : argc
   1991   //  -- jssp[0]  : new.target (optional)
   1992   //  -- jssp[8]  : argumentsList
   1993   //  -- jssp[16] : target
   1994   //  -- jssp[24] : receiver
   1995   // -----------------------------------
   1996   ASM_LOCATION("Builtins::Generate_ReflectConstruct");
   1997 
   1998   Register argc = x0;
   1999   Register arguments_list = x0;
   2000   Register target = x1;
   2001   Register new_target = x3;
   2002   Register undefined_value = x4;
   2003 
   2004   __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
   2005 
   2006   // 1. Load target into x1 (if present), argumentsList into x0 (if present),
   2007   // new.target into x3 (if present, otherwise use target), remove all
   2008   // arguments from the stack (including the receiver), and push thisArgument
   2009   // (if present) instead.
   2010   {
   2011     // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
   2012     // consistent state for a simple pop operation.
   2013     __ Claim(3);
   2014     __ Drop(argc);
   2015 
   2016     // ----------- S t a t e -------------
   2017     //  -- x0       : argc
   2018     //  -- jssp[0]  : new.target    (dummy value if argc <= 2)
   2019     //  -- jssp[8]  : argumentsList (dummy value if argc <= 1)
   2020     //  -- jssp[16] : target        (dummy value if argc == 0)
   2021     //  -- jssp[24] : receiver
   2022     // -----------------------------------
   2023     __ Adds(x10, argc, 0);  // Preserve argc, and set the Z flag if it is zero.
   2024     __ Pop(new_target, arguments_list, target);  // Overwrites argc.
   2025     __ CmovX(target, undefined_value, eq);       // undefined if argc == 0.
   2026     __ Cmp(x10, 2);
   2027     __ CmovX(arguments_list, undefined_value, lo);  // undefined if argc <= 1.
   2028     __ CmovX(new_target, target, ls);               // target if argc <= 2.
   2029 
   2030     __ Poke(undefined_value, 0);  // Overwrite receiver.
   2031   }
   2032 
   2033   // ----------- S t a t e -------------
   2034   //  -- x0      : argumentsList
   2035   //  -- x1      : target
   2036   //  -- x3      : new.target
   2037   //  -- jssp[0] : receiver (undefined)
   2038   // -----------------------------------
   2039 
   2040   // 2. Make sure the target is actually a constructor.
   2041   Label target_not_constructor;
   2042   __ JumpIfSmi(target, &target_not_constructor);
   2043   __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
   2044   __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
   2045   __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
   2046                              &target_not_constructor);
   2047 
   2048   // 3. Make sure the new.target is actually a constructor.
   2049   Label new_target_not_constructor;
   2050   __ JumpIfSmi(new_target, &new_target_not_constructor);
   2051   __ Ldr(x10, FieldMemOperand(new_target, HeapObject::kMapOffset));
   2052   __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
   2053   __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
   2054                              &new_target_not_constructor);
   2055 
   2056   // 4a. Construct the target with the given new.target and argumentsList.
   2057   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
   2058 
   2059   // 4b. The target is not a constructor, throw an appropriate TypeError.
   2060   __ Bind(&target_not_constructor);
   2061   {
   2062     __ Poke(target, 0);
   2063     __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
   2064   }
   2065 
   2066   // 4c. The new.target is not a constructor, throw an appropriate TypeError.
   2067   __ Bind(&new_target_not_constructor);
   2068   {
   2069     __ Poke(new_target, 0);
   2070     __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
   2071   }
   2072 }
   2073 
   2074 
   2075 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
   2076                                       Label* stack_overflow) {
   2077   // ----------- S t a t e -------------
   2078   //  -- x0 : actual number of arguments
   2079   //  -- x1 : function (passed through to callee)
   2080   //  -- x2 : expected number of arguments
   2081   //  -- x3 : new target (passed through to callee)
   2082   // -----------------------------------
   2083   // Check the stack for overflow.
   2084   // We are not trying to catch interruptions (e.g. debug break and
   2085   // preemption) here, so the "real stack limit" is checked.
   2086   Label enough_stack_space;
   2087   __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
   2088   // Make x10 the space we have left. The stack might already be overflowed
   2089   // here, which will cause x10 to become negative.
   2090   __ Sub(x10, jssp, x10);
   2091   // Check if the arguments will overflow the stack.
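          // The following comparison is signed, so a negative x10 (stack
          // already overflowed) also takes the branch.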
   2092   __ Cmp(x10, Operand(x2, LSL, kPointerSizeLog2));
   2093   __ B(le, stack_overflow);
   2094 }
   2095 
   2096 
   2097 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   2098   __ SmiTag(x10, x0);
   2099   __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   2100   __ Push(lr, fp);
   2101   __ Push(x11, x1, x10);
   2102   __ Add(fp, jssp,
   2103          StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
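          // The stack now holds, from high to low: lr, saved fp, the
          // frame-type marker, the function and the Smi-tagged argument
          // count, with fp pointing at the saved fp slot as in a standard
          // frame.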
   2104 }
   2105 
   2106 
   2107 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
   2108   // ----------- S t a t e -------------
   2109   //  -- x0 : result being passed through
   2110   // -----------------------------------
   2111   // Get the number of arguments passed (as a smi), tear down the frame and
   2112   // then drop the parameters and the receiver.
   2113   __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
   2114                                kPointerSize)));
   2115   __ Mov(jssp, fp);
   2116   __ Pop(fp, lr);
   2117   __ DropBySMI(x10, kXRegSize);
   2118   __ Drop(1);
   2119 }
   2120 
   2121 
   2122 // static
   2123 void Builtins::Generate_Apply(MacroAssembler* masm) {
   2124   // ----------- S t a t e -------------
   2125   //  -- x0      : argumentsList
   2126   //  -- x1      : target
   2127   //  -- x3      : new.target (checked to be constructor or undefined)
   2128   //  -- jssp[0] : thisArgument
   2129   // -----------------------------------
   2130 
   2131   Register arguments_list = x0;
   2132   Register target = x1;
   2133   Register new_target = x3;
   2134 
   2135   Register args = x0;
   2136   Register len = x2;
   2137 
   2138   // Create the list of arguments from the array-like argumentsList.
   2139   {
   2140     Label create_arguments, create_array, create_runtime, done_create;
   2141     __ JumpIfSmi(arguments_list, &create_runtime);
   2142 
   2143     // Load native context.
   2144     Register native_context = x4;
   2145     __ Ldr(native_context, NativeContextMemOperand());
   2146 
   2147     // Load the map of argumentsList.
   2148     Register arguments_list_map = x2;
   2149     __ Ldr(arguments_list_map,
   2150            FieldMemOperand(arguments_list, HeapObject::kMapOffset));
   2151 
   2152     // Check if argumentsList is an (unmodified) arguments object.
   2153     __ Ldr(x10, ContextMemOperand(native_context,
   2154                                   Context::SLOPPY_ARGUMENTS_MAP_INDEX));
   2155     __ Ldr(x11, ContextMemOperand(native_context,
   2156                                   Context::STRICT_ARGUMENTS_MAP_INDEX));
   2157     __ Cmp(arguments_list_map, x10);
   2158     __ Ccmp(arguments_list_map, x11, ZFlag, ne);
   2159     __ B(eq, &create_arguments);
   2160 
   2161     // Check if argumentsList is a fast JSArray.
   2162     __ CompareInstanceType(arguments_list_map, native_context, JS_ARRAY_TYPE);
   2163     __ B(eq, &create_array);
   2164 
   2165     // Ask the runtime to create the list (actually a FixedArray).
   2166     __ Bind(&create_runtime);
   2167     {
   2168       FrameScope scope(masm, StackFrame::INTERNAL);
   2169       __ Push(target, new_target, arguments_list);
   2170       __ CallRuntime(Runtime::kCreateListFromArrayLike);
   2171       __ Pop(new_target, target);
   2172       __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
   2173                                             FixedArray::kLengthOffset));
   2174     }
   2175     __ B(&done_create);
   2176 
   2177     // Try to create the list from an arguments object.
   2178     __ Bind(&create_arguments);
   2179     __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
   2180                                           JSArgumentsObject::kLengthOffset));
   2181     __ Ldr(x10, FieldMemOperand(arguments_list, JSObject::kElementsOffset));
   2182     __ Ldrsw(x11, UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
   2183     __ CompareAndBranch(len, x11, ne, &create_runtime);
   2184     __ Mov(args, x10);
   2185     __ B(&done_create);
   2186 
   2187     // Try to create the list from a JSArray object.
   2188     __ Bind(&create_array);
   2189     __ Ldr(x10, FieldMemOperand(arguments_list_map, Map::kBitField2Offset));
   2190     __ DecodeField<Map::ElementsKindBits>(x10);
   2191     STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
   2192     STATIC_ASSERT(FAST_ELEMENTS == 2);
   2193     // Branch for anything that's not FAST_{SMI_}ELEMENTS.
   2194     __ TestAndBranchIfAnySet(x10, ~FAST_ELEMENTS, &create_runtime);
   2195     __ Ldrsw(len,
   2196              UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset));
   2197     __ Ldr(args, FieldMemOperand(arguments_list, JSArray::kElementsOffset));
   2198 
   2199     __ Bind(&done_create);
   2200   }
   2201 
   2202   // Check for stack overflow.
   2203   {
   2204     // Check the stack for overflow. We are not trying to catch interruptions
   2205     // (i.e. debug break and preemption) here, so check the "real stack limit".
   2206     Label done;
   2207     __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
   2208     // Make x10 the space we have left. The stack might already be overflowed
   2209     // here, which will cause x10 to become negative.
   2210     __ Sub(x10, masm->StackPointer(), x10);
   2211     // Check if the arguments will overflow the stack.
   2212     __ Cmp(x10, Operand(len, LSL, kPointerSizeLog2));
   2213     __ B(gt, &done);  // Signed comparison.
   2214     __ TailCallRuntime(Runtime::kThrowStackOverflow);
   2215     __ Bind(&done);
   2216   }
   2217 
   2218   // ----------- S t a t e -------------
   2219   //  -- x0      : args (a FixedArray built from argumentsList)
   2220   //  -- x1      : target
   2221   //  -- x2      : len (number of elements to push from args)
   2222   //  -- x3      : new.target (checked to be constructor or undefined)
   2223   //  -- jssp[0] : thisArgument
   2224   // -----------------------------------
   2225 
   2226   // Push arguments onto the stack (thisArgument is already on the stack).
   2227   {
   2228     Label done, loop;
   2229     Register src = x4;
   2230 
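            // len counts down while src walks up, so args[0] is poked into the
            // slot farthest from jssp and args[len-1] ends up on top, matching
            // the state comment below; x0 keeps the original length as the
            // argument count for the callee.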
   2231     __ Add(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
   2232     __ Mov(x0, len);  // The 'len' argument for Call() or Construct().
   2233     __ Cbz(len, &done);
   2234     __ Claim(len);
   2235     __ Bind(&loop);
   2236     __ Sub(len, len, 1);
   2237     __ Ldr(x10, MemOperand(src, kPointerSize, PostIndex));
   2238     __ Poke(x10, Operand(len, LSL, kPointerSizeLog2));
   2239     __ Cbnz(len, &loop);
   2240     __ Bind(&done);
   2241   }
   2242 
   2243   // ----------- S t a t e -------------
   2244   //  -- x0              : argument count (len)
   2245   //  -- x1              : target
   2246   //  -- x3              : new.target (checked to be constructor or undefined)
   2247   //  -- jssp[0]         : args[len-1]
   2248   //  -- jssp[8]         : args[len-2]
   2249   //      ...            :  ...
   2250   //  -- jssp[8*(len-2)] : args[1]
   2251   //  -- jssp[8*(len-1)] : args[0]
   2252   // -----------------------------------
   2253 
   2254   // Dispatch to Call or Construct depending on whether new.target is undefined.
   2255   {
   2256     __ CompareRoot(new_target, Heap::kUndefinedValueRootIndex);
   2257     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
   2258     __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
   2259   }
   2260 }
   2261 
   2262 namespace {
   2263 
   2264 // Drops the top JavaScript frame and an arguments adaptor frame below it
   2265 // (if present), preserving all the arguments prepared for the current call.
   2266 // Does nothing if the debugger is currently active.
   2267 // ES6 14.6.3. PrepareForTailCall
   2268 //
   2269 // Stack structure for the function g() tail calling f():
   2270 //
   2271 // ------- Caller frame: -------
   2272 // |  ...
   2273 // |  g()'s arg M
   2274 // |  ...
   2275 // |  g()'s arg 1
   2276 // |  g()'s receiver arg
   2277 // |  g()'s caller pc
   2278 // ------- g()'s frame: -------
   2279 // |  g()'s caller fp      <- fp
   2280 // |  g()'s context
   2281 // |  function pointer: g
   2282 // |  -------------------------
   2283 // |  ...
   2284 // |  ...
   2285 // |  f()'s arg N
   2286 // |  ...
   2287 // |  f()'s arg 1
   2288 // |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
   2289 // ----------------------
   2290 //
   2291 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
   2292                         Register scratch1, Register scratch2,
   2293                         Register scratch3) {
   2294   DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
   2295   Comment cmnt(masm, "[ PrepareForTailCall");
   2296 
   2297   // Prepare for tail call only if ES2015 tail call elimination is enabled.
   2298   Label done;
   2299   ExternalReference is_tail_call_elimination_enabled =
   2300       ExternalReference::is_tail_call_elimination_enabled_address(
   2301           masm->isolate());
   2302   __ Mov(scratch1, Operand(is_tail_call_elimination_enabled));
   2303   __ Ldrb(scratch1, MemOperand(scratch1));
   2304   __ Cmp(scratch1, Operand(0));
   2305   __ B(eq, &done);
   2306 
   2307   // Drop possible interpreter handler/stub frame.
   2308   {
   2309     Label no_interpreter_frame;
   2310     __ Ldr(scratch3,
   2311            MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
   2312     __ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::STUB)));
   2313     __ B(ne, &no_interpreter_frame);
   2314     __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2315     __ bind(&no_interpreter_frame);
   2316   }
   2317 
   2318   // Check if next frame is an arguments adaptor frame.
   2319   Register caller_args_count_reg = scratch1;
   2320   Label no_arguments_adaptor, formal_parameter_count_loaded;
   2321   __ Ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   2322   __ Ldr(scratch3,
   2323          MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
   2324   __ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   2325   __ B(ne, &no_arguments_adaptor);
   2326 
   2327   // Drop current frame and load arguments count from arguments adaptor frame.
   2328   __ mov(fp, scratch2);
   2329   __ Ldr(caller_args_count_reg,
   2330          MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
   2331   __ SmiUntag(caller_args_count_reg);
   2332   __ B(&formal_parameter_count_loaded);
   2333 
   2334   __ bind(&no_arguments_adaptor);
   2335   // Load caller's formal parameter count
   2336   __ Ldr(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   2337   __ Ldr(scratch1,
   2338          FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
   2339   __ Ldrsw(caller_args_count_reg,
   2340            FieldMemOperand(scratch1,
   2341                            SharedFunctionInfo::kFormalParameterCountOffset));
   2342   __ bind(&formal_parameter_count_loaded);
   2343 
   2344   ParameterCount callee_args_count(args_reg);
   2345   __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
   2346                         scratch3);
   2347   __ bind(&done);
   2348 }
   2349 }  // namespace
   2350 
   2351 // static
   2352 void Builtins::Generate_CallFunction(MacroAssembler* masm,
   2353                                      ConvertReceiverMode mode,
   2354                                      TailCallMode tail_call_mode) {
   2355   ASM_LOCATION("Builtins::Generate_CallFunction");
   2356   // ----------- S t a t e -------------
   2357   //  -- x0 : the number of arguments (not including the receiver)
   2358   //  -- x1 : the function to call (checked to be a JSFunction)
   2359   // -----------------------------------
   2360   __ AssertFunction(x1);
   2361 
   2362   // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
   2363   // Check that function is not a "classConstructor".
   2364   Label class_constructor;
   2365   __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
   2366   __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
   2367   __ TestAndBranchIfAnySet(
   2368       w3, (1 << SharedFunctionInfo::kIsDefaultConstructor) |
   2369               (1 << SharedFunctionInfo::kIsSubclassConstructor) |
   2370               (1 << SharedFunctionInfo::kIsBaseConstructor),
   2371       &class_constructor);
   2372 
   2373   // Enter the context of the function; ToObject has to run in the function
   2374   // context, and we also need to take the global proxy from the function
   2375   // context in case of conversion.
   2376   __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
   2377   // We need to convert the receiver for non-native sloppy mode functions.
   2378   Label done_convert;
   2379   __ TestAndBranchIfAnySet(w3,
   2380                            (1 << SharedFunctionInfo::kNative) |
   2381                                (1 << SharedFunctionInfo::kStrictModeFunction),
   2382                            &done_convert);
   2383   {
   2384     // ----------- S t a t e -------------
   2385     //  -- x0 : the number of arguments (not including the receiver)
   2386     //  -- x1 : the function to call (checked to be a JSFunction)
   2387     //  -- x2 : the shared function info.
   2388     //  -- cp : the function context.
   2389     // -----------------------------------
   2390 
   2391     if (mode == ConvertReceiverMode::kNullOrUndefined) {
   2392       // Patch receiver to global proxy.
   2393       __ LoadGlobalProxy(x3);
   2394     } else {
   2395       Label convert_to_object, convert_receiver;
   2396       __ Peek(x3, Operand(x0, LSL, kXRegSizeLog2));
   2397       __ JumpIfSmi(x3, &convert_to_object);
   2398       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
   2399       __ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE);
   2400       __ B(hs, &done_convert);
   2401       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
   2402         Label convert_global_proxy;
   2403         __ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex,
   2404                       &convert_global_proxy);
   2405         __ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object);
   2406         __ Bind(&convert_global_proxy);
   2407         {
   2408           // Patch receiver to global proxy.
   2409           __ LoadGlobalProxy(x3);
   2410         }
   2411         __ B(&convert_receiver);
   2412       }
   2413       __ Bind(&convert_to_object);
   2414       {
   2415         // Convert receiver using ToObject.
   2416         // TODO(bmeurer): Inline the allocation here to avoid building the frame
   2417         // in the fast case? (fall back to AllocateInNewSpace?)
   2418         FrameScope scope(masm, StackFrame::INTERNAL);
   2419         __ SmiTag(x0);
   2420         __ Push(x0, x1);
   2421         __ Mov(x0, x3);
   2422         ToObjectStub stub(masm->isolate());
   2423         __ CallStub(&stub);
   2424         __ Mov(x3, x0);
   2425         __ Pop(x1, x0);
   2426         __ SmiUntag(x0);
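                // x0 (the argument count) was Smi-tagged above so that the GC
                // can safely visit it while it is spilled across the ToObject
                // call.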
   2427       }
   2428       __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
   2429       __ Bind(&convert_receiver);
   2430     }
   2431     __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2));
   2432   }
   2433   __ Bind(&done_convert);
   2434 
   2435   // ----------- S t a t e -------------
   2436   //  -- x0 : the number of arguments (not including the receiver)
   2437   //  -- x1 : the function to call (checked to be a JSFunction)
   2438   //  -- x2 : the shared function info.
   2439   //  -- cp : the function context.
   2440   // -----------------------------------
   2441 
   2442   if (tail_call_mode == TailCallMode::kAllow) {
   2443     PrepareForTailCall(masm, x0, x3, x4, x5);
   2444   }
   2445 
   2446   __ Ldrsw(
   2447       x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
   2448   ParameterCount actual(x0);
   2449   ParameterCount expected(x2);
   2450   __ InvokeFunctionCode(x1, no_reg, expected, actual, JUMP_FUNCTION,
   2451                         CheckDebugStepCallWrapper());
   2452 
   2453   // The function is a "classConstructor", need to raise an exception.
   2454   __ bind(&class_constructor);
   2455   {
   2456     FrameScope frame(masm, StackFrame::INTERNAL);
   2457     __ Push(x1);
   2458     __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
   2459   }
   2460 }
   2461 
   2462 
   2463 namespace {
   2464 
   2465 void Generate_PushBoundArguments(MacroAssembler* masm) {
   2466   // ----------- S t a t e -------------
   2467   //  -- x0 : the number of arguments (not including the receiver)
   2468   //  -- x1 : target (checked to be a JSBoundFunction)
   2469   //  -- x3 : new.target (only in case of [[Construct]])
   2470   // -----------------------------------
   2471 
   2472   // Load [[BoundArguments]] into x2 and length of that into x4.
   2473   Label no_bound_arguments;
   2474   __ Ldr(x2, FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset));
   2475   __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
   2476   __ Cmp(x4, 0);
   2477   __ B(eq, &no_bound_arguments);
   2478   {
   2479     // ----------- S t a t e -------------
   2480     //  -- x0 : the number of arguments (not including the receiver)
   2481     //  -- x1 : target (checked to be a JSBoundFunction)
   2482     //  -- x2 : the [[BoundArguments]] (implemented as FixedArray)
   2483     //  -- x3 : new.target (only in case of [[Construct]])
   2484     //  -- x4 : the number of [[BoundArguments]]
   2485     // -----------------------------------
   2486 
   2487     // Reserve stack space for the [[BoundArguments]].
   2488     {
   2489       Label done;
   2490       __ Claim(x4);
   2491       // Check the stack for overflow. We are not trying to catch interruptions
   2492       // (i.e. debug break and preemption) here, so check the "real stack
   2493       // limit".
   2494       __ CompareRoot(jssp, Heap::kRealStackLimitRootIndex);
   2495       __ B(gt, &done);  // Signed comparison.
   2496       // Restore the stack pointer.
   2497       __ Drop(x4);
   2498       {
   2499         FrameScope scope(masm, StackFrame::MANUAL);
   2500         __ EnterFrame(StackFrame::INTERNAL);
   2501         __ CallRuntime(Runtime::kThrowStackOverflow);
   2502       }
   2503       __ Bind(&done);
   2504     }
   2505 
   2506     // Relocate arguments down the stack.
   2507     {
   2508       Label loop, done_loop;
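              // x4 holds the number of bound arguments, i.e. how many slots
              // the existing arguments (including the receiver) must move
              // down; x5 indexes the destination slots and x4 the source
              // slots.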
   2509       __ Mov(x5, 0);
   2510       __ Bind(&loop);
   2511       __ Cmp(x5, x0);
   2512       __ B(gt, &done_loop);
   2513       __ Peek(x10, Operand(x4, LSL, kPointerSizeLog2));
   2514       __ Poke(x10, Operand(x5, LSL, kPointerSizeLog2));
   2515       __ Add(x4, x4, 1);
   2516       __ Add(x5, x5, 1);
   2517       __ B(&loop);
   2518       __ Bind(&done_loop);
   2519     }
   2520 
   2521     // Copy [[BoundArguments]] to the stack (below the arguments).
   2522     {
   2523       Label loop;
   2524       __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
   2525       __ Add(x2, x2, FixedArray::kHeaderSize - kHeapObjectTag);
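              // Walk the FixedArray from its last element down, poking each
              // bound argument below the relocated arguments; x0 is bumped so
              // that it ends up holding the new total argument count.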
   2526       __ Bind(&loop);
   2527       __ Sub(x4, x4, 1);
   2528       __ Ldr(x10, MemOperand(x2, x4, LSL, kPointerSizeLog2));
   2529       __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
   2530       __ Add(x0, x0, 1);
   2531       __ Cmp(x4, 0);
   2532       __ B(gt, &loop);
   2533     }
   2534   }
   2535   __ Bind(&no_bound_arguments);
   2536 }
   2537 
   2538 }  // namespace
   2539 
   2540 
   2541 // static
   2542 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
   2543                                               TailCallMode tail_call_mode) {
   2544   // ----------- S t a t e -------------
   2545   //  -- x0 : the number of arguments (not including the receiver)
   2546   //  -- x1 : the function to call (checked to be a JSBoundFunction)
   2547   // -----------------------------------
   2548   __ AssertBoundFunction(x1);
   2549 
   2550   if (tail_call_mode == TailCallMode::kAllow) {
   2551     PrepareForTailCall(masm, x0, x3, x4, x5);
   2552   }
   2553 
   2554   // Patch the receiver to [[BoundThis]].
   2555   __ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset));
   2556   __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
   2557 
   2558   // Push the [[BoundArguments]] onto the stack.
   2559   Generate_PushBoundArguments(masm);
   2560 
   2561   // Call the [[BoundTargetFunction]] via the Call builtin.
   2562   __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
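          // The Call builtin's code object is loaded indirectly, through an
          // external reference, and the jump targets its first instruction,
          // just past the Code object's header.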
   2563   __ Mov(x10,
   2564          ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
   2565   __ Ldr(x11, MemOperand(x10));
   2566   __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
   2567   __ Br(x12);
   2568 }
   2569 
   2570 
   2571 // static
   2572 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
   2573                              TailCallMode tail_call_mode) {
   2574   // ----------- S t a t e -------------
   2575   //  -- x0 : the number of arguments (not including the receiver)
   2576   //  -- x1 : the target to call (can be any Object).
   2577   // -----------------------------------
   2578 
   2579   Label non_callable, non_function, non_smi;
   2580   __ JumpIfSmi(x1, &non_callable);
   2581   __ Bind(&non_smi);
   2582   __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
   2583   __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
   2584           RelocInfo::CODE_TARGET, eq);
   2585   __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
   2586   __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
   2587           RelocInfo::CODE_TARGET, eq);
   2588 
   2589   // Check if target has a [[Call]] internal method.
   2590   __ Ldrb(x4, FieldMemOperand(x4, Map::kBitFieldOffset));
   2591   __ TestAndBranchIfAllClear(x4, 1 << Map::kIsCallable, &non_callable);
   2592 
   2593   __ Cmp(x5, JS_PROXY_TYPE);
   2594   __ B(ne, &non_function);
   2595 
   2596   // 0. Prepare for tail call if necessary.
   2597   if (tail_call_mode == TailCallMode::kAllow) {
   2598     PrepareForTailCall(masm, x0, x3, x4, x5);
   2599   }
   2600 
   2601   // 1. Runtime fallback for Proxy [[Call]].
   2602   __ Push(x1);
   2603   // Increase the arguments size to include the pushed function and the
   2604   // existing receiver on the stack.
   2605   __ Add(x0, x0, Operand(2));
   2606   // Tail-call to the runtime.
   2607   __ JumpToExternalReference(
   2608       ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
   2609 
   2610   // 2. Call to something else that is callable (the is_callable bit of
   2611   // its map was checked above) but is neither a function nor a proxy.
   2612   __ Bind(&non_function);
   2613   // Overwrite the original receiver with the (original) target.
   2614   __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
   2615   // Let the "call_as_function_delegate" take care of the rest.
   2616   __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1);
   2617   __ Jump(masm->isolate()->builtins()->CallFunction(
   2618               ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
   2619           RelocInfo::CODE_TARGET);
   2620 
   2621   // 3. Call to something that is not callable.
   2622   __ Bind(&non_callable);
   2623   {
   2624     FrameScope scope(masm, StackFrame::INTERNAL);
   2625     __ Push(x1);
   2626     __ CallRuntime(Runtime::kThrowCalledNonCallable);
   2627   }
   2628 }
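
        // Dispatch order in Generate_Call, summarized (illustrative):
        //   Smi               -> Runtime::kThrowCalledNonCallable
        //   JSFunction        -> CallFunction builtin
        //   JSBoundFunction   -> CallBoundFunction builtin
        //   !map.is_callable  -> Runtime::kThrowCalledNonCallable
        //   JSProxy           -> Runtime::kJSProxyCall
        //   other callable    -> CALL_AS_FUNCTION_DELEGATE via CallFunction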
   2629 
   2630 
   2631 // static
   2632 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
   2633   // ----------- S t a t e -------------
   2634   //  -- x0 : the number of arguments (not including the receiver)
   2635   //  -- x1 : the constructor to call (checked to be a JSFunction)
   2636   //  -- x3 : the new target (checked to be a constructor)
   2637   // -----------------------------------
   2638   __ AssertFunction(x1);
   2639 
   2640   // The calling convention for function-specific ConstructStubs requires
   2641   // x2 to contain either an AllocationSite or undefined.
   2642   __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
   2643 
   2644   // Tail call to the function-specific construct stub (still in the caller
   2645   // context at this point).
   2646   __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
   2647   __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
   2648   __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
   2649   __ Br(x4);
   2650 }
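
        // The construct stub loaded from the SharedFunctionInfo is what
        // allocates the receiver and invokes the function; which stub is
        // installed there depends on the kind of function (a general note,
        // as the exact set of stubs varies across V8 versions).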
   2651 
   2652 
   2653 // static
   2654 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
   2655   // ----------- S t a t e -------------
   2656   //  -- x0 : the number of arguments (not including the receiver)
   2657   //  -- x1 : the function to call (checked to be a JSBoundFunction)
   2658   //  -- x3 : the new target (checked to be a constructor)
   2659   // -----------------------------------
   2660   __ AssertBoundFunction(x1);
   2661 
   2662   // Push the [[BoundArguments]] onto the stack.
   2663   Generate_PushBoundArguments(masm);
   2664 
   2665   // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
   2666   {
   2667     Label done;
   2668     __ Cmp(x1, x3);
   2669     __ B(ne, &done);
   2670     __ Ldr(x3,
   2671            FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
   2672     __ Bind(&done);
   2673   }
   2674 
   2675   // Construct the [[BoundTargetFunction]] via the Construct builtin.
   2676   __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
   2677   __ Mov(x10, ExternalReference(Builtins::kConstruct, masm->isolate()));
   2678   __ Ldr(x11, MemOperand(x10));
   2679   __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
   2680   __ Br(x12);
   2681 }
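
        // Roughly, at the JS level (a sketch of the ES6 bound-function
        // [[Construct]] semantics, not literal code):
        //
        //   construct(bound, args, newTarget) ~= Reflect.construct(
        //       bound.[[BoundTargetFunction]],
        //       [...bound.[[BoundArguments]], ...args],
        //       newTarget === bound ? bound.[[BoundTargetFunction]]
        //                           : newTarget);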
   2682 
   2683 
   2684 // static
   2685 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
   2686   // ----------- S t a t e -------------
   2687   //  -- x0 : the number of arguments (not including the receiver)
   2688   //  -- x1 : the constructor to call (checked to be a JSProxy)
   2689   //  -- x3 : the new target (either the same as the constructor or
   2690   //          the JSFunction on which new was invoked initially)
   2691   // -----------------------------------
   2692 
   2693   // Call into the Runtime for Proxy [[Construct]].
   2694   __ Push(x1);
   2695   __ Push(x3);
   2696   // Include the pushed new_target, constructor, and the receiver.
   2697   __ Add(x0, x0, 3);
   2698   // Tail-call to the runtime.
   2699   __ JumpToExternalReference(
   2700       ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
   2701 }
   2702 
   2703 
   2704 // static
   2705 void Builtins::Generate_Construct(MacroAssembler* masm) {
   2706   // ----------- S t a t e -------------
   2707   //  -- x0 : the number of arguments (not including the receiver)
   2708   //  -- x1 : the constructor to call (can be any Object)
   2709   //  -- x3 : the new target (either the same as the constructor or
   2710   //          the JSFunction on which new was invoked initially)
   2711   // -----------------------------------
   2712 
   2713   // Check if target is a Smi.
   2714   Label non_constructor;
   2715   __ JumpIfSmi(x1, &non_constructor);
   2716 
   2717   // Dispatch based on instance type.
   2718   __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
   2719   __ Jump(masm->isolate()->builtins()->ConstructFunction(),
   2720           RelocInfo::CODE_TARGET, eq);
   2721 
   2722   // Check if target has a [[Construct]] internal method.
   2723   __ Ldrb(x2, FieldMemOperand(x4, Map::kBitFieldOffset));
   2724   __ TestAndBranchIfAllClear(x2, 1 << Map::kIsConstructor, &non_constructor);
   2725 
   2726   // Only dispatch to bound functions after checking whether they are
   2727   // constructors.
   2728   __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
   2729   __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
   2730           RelocInfo::CODE_TARGET, eq);
   2731 
   2732   // Only dispatch to proxies after checking whether they are constructors.
   2733   __ Cmp(x5, JS_PROXY_TYPE);
   2734   __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
   2735           eq);
   2736 
   2737   // Called Construct on an exotic Object with a [[Construct]] internal method.
   2738   {
   2739     // Overwrite the original receiver with the (original) target.
   2740     __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
   2741     // Let the "call_as_constructor_delegate" take care of the rest.
   2742     __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, x1);
   2743     __ Jump(masm->isolate()->builtins()->CallFunction(),
   2744             RelocInfo::CODE_TARGET);
   2745   }
   2746 
   2747   // Called Construct on an Object that doesn't have a [[Construct]] internal
   2748   // method.
   2749   __ Bind(&non_constructor);
   2750   __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
   2751           RelocInfo::CODE_TARGET);
   2752 }
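
        // Dispatch order in Generate_Construct, summarized (illustrative):
        //   Smi                  -> ConstructedNonConstructable builtin
        //   JSFunction           -> ConstructFunction builtin
        //   !map.is_constructor  -> ConstructedNonConstructable builtin
        //   JSBoundFunction      -> ConstructBoundFunction builtin
        //   JSProxy              -> ConstructProxy builtin
        //   other constructor    -> CALL_AS_CONSTRUCTOR_DELEGATE via
        //                           CallFunction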
   2753 
   2754 // static
   2755 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
   2756   ASM_LOCATION("Builtins::Generate_AllocateInNewSpace");
   2757   // ----------- S t a t e -------------
   2758   //  -- x1 : requested object size (untagged)
   2759   //  -- lr : return address
   2760   // -----------------------------------
   2761   __ SmiTag(x1);
   2762   __ Push(x1);
   2763   __ Move(cp, Smi::FromInt(0));
   2764   __ TailCallRuntime(Runtime::kAllocateInNewSpace);
   2765 }
   2766 
   2767 // static
   2768 void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
   2769   ASM_LOCATION("Builtins::Generate_AllocateInOldSpace");
   2770   // ----------- S t a t e -------------
   2771   //  -- x1 : requested object size (untagged)
   2772   //  -- lr : return address
   2773   // -----------------------------------
   2774   __ SmiTag(x1);
   2775   __ Move(x2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
   2776   __ Push(x1, x2);
   2777   __ Move(cp, Smi::FromInt(0));
   2778   __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
   2779 }
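
        // Both allocation builtins share one contract: the requested size
        // arrives untagged in x1 and is Smi-tagged here because the runtime
        // takes Smi arguments, and cp is set to Smi zero since these builtins
        // can be reached without a JS context. A sketch of a call site
        // (assumed shape, not code from this file):
        //
        //   __ Mov(x1, object_size_in_bytes);
        //   __ Call(isolate->builtins()->AllocateInNewSpace(),
        //           RelocInfo::CODE_TARGET);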
   2780 
   2781 // static
   2782 void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
   2783   // The StringToNumber stub takes one argument in x0.
   2784   __ AssertString(x0);
   2785 
   2786   // Check if string has a cached array index.
   2787   Label runtime;
   2788   __ Ldr(x2, FieldMemOperand(x0, String::kHashFieldOffset));
   2789   __ Tst(x2, Operand(String::kContainsCachedArrayIndexMask));
   2790   __ B(ne, &runtime);
   2791   __ IndexFromHash(x2, x0);
   2792   __ Ret();
   2793 
   2794   __ Bind(&runtime);
   2795   {
   2796     FrameScope frame(masm, StackFrame::INTERNAL);
   2797     // Push argument.
   2798     __ Push(x0);
   2799     // We cannot use a tail call here because this builtin can also be called
   2800     // from wasm.
   2801     __ CallRuntime(Runtime::kStringToNumber);
   2802   }
   2803   __ Ret();
   2804 }
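
        // Illustrative behavior (a sketch): a string whose hash field caches
        // an array index, e.g. "7", is converted via IndexFromHash without
        // leaving generated code; anything else, e.g. "3.14" or " 7 ", falls
        // through to Runtime::kStringToNumber.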
   2805 
   2806 // static
   2807 void Builtins::Generate_ToNumber(MacroAssembler* masm) {
   2808   // The ToNumber stub takes one argument in x0.
   2809   Label not_smi;
   2810   __ JumpIfNotSmi(x0, &not_smi);
   2811   __ Ret();
   2812   __ Bind(&not_smi);
   2813 
   2814   Label not_heap_number;
   2815   __ CompareObjectType(x0, x1, x1, HEAP_NUMBER_TYPE);
   2816   // x0: receiver
   2817   // x1: receiver instance type
   2818   __ B(ne, &not_heap_number);
   2819   __ Ret();
   2820   __ Bind(&not_heap_number);
   2821 
   2822   __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
   2823           RelocInfo::CODE_TARGET);
   2824 }
   2825 
   2826 // static
   2827 void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
   2828   // The NonNumberToNumber stub takes one argument in x0.
   2829   __ AssertNotNumber(x0);
   2830 
   2831   Label not_string;
   2832   __ CompareObjectType(x0, x1, x1, FIRST_NONSTRING_TYPE);
   2833   // x0: receiver
   2834   // x1: receiver instance type
   2835   __ B(hs, &not_string);
   2836   __ Jump(masm->isolate()->builtins()->StringToNumber(),
   2837           RelocInfo::CODE_TARGET);
   2838   __ Bind(&not_string);
   2839 
   2840   Label not_oddball;
   2841   __ Cmp(x1, ODDBALL_TYPE);
   2842   __ B(ne, &not_oddball);
   2843   __ Ldr(x0, FieldMemOperand(x0, Oddball::kToNumberOffset));
   2844   __ Ret();
   2845   __ Bind(&not_oddball);
   2846   {
   2847     FrameScope frame(masm, StackFrame::INTERNAL);
   2848     // Push argument.
   2849     __ Push(x0);
   2850     // We cannot use a tail call here because this builtin can also be called
   2851     // from wasm.
   2852     __ CallRuntime(Runtime::kToNumber);
   2853   }
   2854   __ Ret();
   2855 }
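
        // Oddballs cache their numeric value in the to_number field, so they
        // resolve without a runtime call. Illustrative results (a sketch):
        //   ToNumber(true)      -> 1
        //   ToNumber(null)      -> 0
        //   ToNumber(undefined) -> NaN
        //   ToNumber("42")      -> 42 (via the StringToNumber builtin)
        //   ToNumber({})        -> Runtime::kToNumber, which may invoke
        //                          valueOf or toString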
   2856 
   2857 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   2858   ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
   2859   // ----------- S t a t e -------------
   2860   //  -- x0 : actual number of arguments
   2861   //  -- x1 : function (passed through to callee)
   2862   //  -- x2 : expected number of arguments
   2863   //  -- x3 : new target (passed through to callee)
   2864   // -----------------------------------
   2865 
   2866   Register argc_actual = x0;  // Excluding the receiver.
   2867   Register argc_expected = x2;  // Excluding the receiver.
   2868   Register function = x1;
   2869   Register code_entry = x10;
   2870 
   2871   Label invoke, dont_adapt_arguments, stack_overflow;
   2872 
   2873   Label enough, too_few;
   2874   __ Cmp(argc_actual, argc_expected);
   2875   __ B(lt, &too_few);
   2876   __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
   2877   __ B(eq, &dont_adapt_arguments);
   2878 
   2879   {  // Enough parameters: actual >= expected
   2880     EnterArgumentsAdaptorFrame(masm);
   2881     ArgumentAdaptorStackCheck(masm, &stack_overflow);
   2882 
   2883     Register copy_start = x10;
   2884     Register copy_end = x11;
   2885     Register copy_to = x12;
   2886     Register scratch1 = x13, scratch2 = x14;
   2887 
   2888     __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
   2889 
   2890     // Adjust for fp, lr, and the receiver.
   2891     __ Add(copy_start, fp, 3 * kPointerSize);
   2892     __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2));
   2893     __ Sub(copy_end, copy_start, scratch2);
   2894     __ Sub(copy_end, copy_end, kPointerSize);
   2895     __ Mov(copy_to, jssp);
   2896 
   2897     // Claim space for the arguments, the receiver, and one extra slot.
   2898     // The extra slot ensures we do not write under jssp. It will be popped
   2899     // later.
   2900     __ Add(scratch1, scratch2, 2 * kPointerSize);
   2901     __ Claim(scratch1, 1);
   2902 
   2903     // Copy the arguments (including the receiver) to the new stack frame.
   2904     Label copy_2_by_2;
   2905     __ Bind(&copy_2_by_2);
   2906     __ Ldp(scratch1, scratch2,
   2907            MemOperand(copy_start, - 2 * kPointerSize, PreIndex));
   2908     __ Stp(scratch1, scratch2,
   2909            MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
   2910     __ Cmp(copy_start, copy_end);
   2911     __ B(hi, &copy_2_by_2);
   2912 
   2913     // Correct the space allocated for the extra slot.
   2914     __ Drop(1);
   2915 
   2916     __ B(&invoke);
   2917   }
   2918 
   2919   {  // Too few parameters: actual < expected
   2920     __ Bind(&too_few);
   2921 
   2922     Register copy_from = x10;
   2923     Register copy_end = x11;
   2924     Register copy_to = x12;
   2925     Register scratch1 = x13, scratch2 = x14;
   2926 
   2927     EnterArgumentsAdaptorFrame(masm);
   2928     ArgumentAdaptorStackCheck(masm, &stack_overflow);
   2929 
   2930     __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
   2931     __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);
   2932 
   2933     // Adjust for fp, lr, and the receiver.
   2934     __ Add(copy_from, fp, 3 * kPointerSize);
   2935     __ Add(copy_from, copy_from, argc_actual);
   2936     __ Mov(copy_to, jssp);
   2937     __ Sub(copy_end, copy_to, 1 * kPointerSize);   // Adjust for the receiver.
   2938     __ Sub(copy_end, copy_end, argc_actual);
   2939 
   2940     // Claim space for the arguments, the receiver, and one extra slot.
   2941     // The extra slot ensures we do not write under jssp. It will be popped
   2942     // later.
   2943     __ Add(scratch1, scratch2, 2 * kPointerSize);
   2944     __ Claim(scratch1, 1);
   2945 
   2946     // Copy the arguments (including the receiver) to the new stack frame.
   2947     Label copy_2_by_2;
   2948     __ Bind(&copy_2_by_2);
   2949     __ Ldp(scratch1, scratch2,
   2950            MemOperand(copy_from, - 2 * kPointerSize, PreIndex));
   2951     __ Stp(scratch1, scratch2,
   2952            MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
   2953     __ Cmp(copy_to, copy_end);
   2954     __ B(hi, &copy_2_by_2);
   2955 
   2956     __ Mov(copy_to, copy_end);
   2957 
   2958     // Fill the remaining expected arguments with undefined.
   2959     __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
   2960     __ Add(copy_end, jssp, kPointerSize);
   2961 
   2962     Label fill;
   2963     __ Bind(&fill);
   2964     __ Stp(scratch1, scratch1,
   2965            MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
   2966     __ Cmp(copy_to, copy_end);
   2967     __ B(hi, &fill);
   2968 
   2969     // Correct the space allocated for the extra slot.
   2970     __ Drop(1);
   2971   }
   2972 
   2973   // Arguments have been adapted. Now call the entry point.
   2974   __ Bind(&invoke);
   2975   __ Mov(argc_actual, argc_expected);
   2976   // x0 : expected number of arguments
   2977   // x1 : function (passed through to callee)
   2978   // x3 : new target (passed through to callee)
   2979   __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
   2980   __ Call(code_entry);
   2981 
   2982   // Store offset of return address for deoptimizer.
   2983   masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
   2984 
   2985   // Exit frame and return.
   2986   LeaveArgumentsAdaptorFrame(masm);
   2987   __ Ret();
   2988 
   2989   // Call the entry point without adapting the arguments.
   2990   __ Bind(&dont_adapt_arguments);
   2991   __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
   2992   __ Jump(code_entry);
   2993 
   2994   __ Bind(&stack_overflow);
   2995   {
   2996     FrameScope frame(masm, StackFrame::MANUAL);
   2997     __ CallRuntime(Runtime::kThrowStackOverflow);
   2998     __ Unreachable();
   2999   }
   3000 }
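
        // Worked example (illustrative): for function f(a, b, c) called as
        // f(1), actual = 1 and expected = 3. The "too few" path is taken: an
        // adaptor frame is entered, the receiver and the single argument are
        // copied into it, b and c are filled with undefined, x0 is set to 3,
        // and f's code entry is called.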
   3001 
   3002 
   3003 #undef __
   3004 
   3005 }  // namespace internal
   3006 }  // namespace v8
   3007 
   3008 #endif  // V8_TARGET_ARCH_ARM64
   3009